def main():
    """Compute a sky model for one DESI exposure frame.

    Reads the frame, locates sky fibers via the fibermap, applies the
    fiberflat, fits the sky model, and writes it to ``--outfile``.
    Exits with status 12 if the fibermap contains no usable sky fiber.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--infile', type=str, default=None, required=True,
                        help='path of DESI exposure frame fits file')
    parser.add_argument('--fibermap', type=str, default=None, required=True,
                        help='path of DESI exposure frame fits file')
    parser.add_argument('--fiberflat', type=str, default=None, required=True,
                        help='path of DESI fiberflat fits file')
    parser.add_argument('--outfile', type=str, default=None, required=True,
                        help='path of DESI sky fits file')
    args = parser.parse_args()

    log = get_logger()
    log.info("starting")

    # Read the exposure to get the data and the spectra range it covers.
    frame = read_frame(args.infile)
    spec_lo = frame.header["SPECMIN"]
    spec_hi = frame.header["SPECMAX"]

    # Locate sky fibers within that spectra range.
    fibermap = read_fibermap(args.fibermap)
    is_sky = (fibermap["OBJTYPE"] == "SKY")
    in_range = (fibermap["FIBER"] >= spec_lo) & (fibermap["FIBER"] <= spec_hi)
    selection = np.where(is_sky & in_range)[0]
    if selection.size == 0:
        log.error("no sky fiber in fibermap %s" % args.fibermap)
        sys.exit(12)

    # Flat-field, then fit and persist the sky model.
    fiberflat = read_fiberflat(args.fiberflat)
    apply_fiberflat(frame, fiberflat)
    skymodel = compute_sky(frame, fibermap)
    write_sky(args.outfile, skymodel, frame.header)
    log.info("successfully wrote %s" % args.outfile)
def load_s2n_values(objtype, nights, channel, sub_exposures=None):
    """Collect signal-to-noise measurements for one template type.

    Parameters
    ----------
    objtype : str
        Template type to select from the simspec TRUTH table (e.g. 'ELG').
    nights : iterable
        Nights to scan for exposures.
    channel : str
        Camera channel prefix ('b', 'r' or 'z'); cameras 0-9 are scanned.
    sub_exposures : iterable, optional
        Explicit exposure IDs to use instead of all exposures of the night.

    Returns
    -------
    dict
        Keys 'waves', 's2n', 'fluxes', 'exptime' and (for ELG) 'OII',
        each a list with one entry per cframe read.
    """
    fdict = dict(waves=[], s2n=[], fluxes=[], exptime=[], OII=[])
    for night in nights:
        if sub_exposures is not None:
            exposures = sub_exposures
        else:
            exposures = get_exposures(night)  # , raw=True)
        for exposure in exposures:
            fibermap_path = findfile(filetype='fibermap', night=night,
                                     expid=exposure)
            fibermap_data = read_fibermap(fibermap_path)
            flavor = fibermap_data.meta['FLAVOR']
            if flavor.lower() in ('arc', 'flat', 'bias'):
                log.debug('Skipping calibration {} exposure {:08d}'.format(
                    flavor, exposure))
                continue
            # Load simspec truth table; context manager guarantees the
            # FITS handle is closed even if the TRUTH HDU is missing.
            simspec_file = fibermap_path.replace('fibermap', 'simspec')
            with fits.open(simspec_file) as sps_hdu:
                sps_tab = Table(sps_hdu['TRUTH'].data, masked=True)
            objs = sps_tab['TEMPLATETYPE'] == objtype
            if np.sum(objs) == 0:
                continue
            # Load spectra (flux or not fluxed; should not matter)
            for ii in range(10):
                camera = channel + str(ii)
                cframe_path = findfile(filetype='cframe', night=night,
                                       expid=exposure, camera=camera)
                try:
                    cframe = read_frame(cframe_path)
                except Exception:
                    # Narrowed from a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit; log.warn is deprecated.
                    log.warning("Cannot find file: {:s}".format(cframe_path))
                    continue
                # Calculate S/N per Ang; first bin duplicated from second
                # because np.roll wraps around.
                dwave = cframe.wave - np.roll(cframe.wave, 1)
                dwave[0] = dwave[1]
                iobjs = objs[cframe.fibers]
                if np.sum(iobjs) == 0:
                    continue
                s2n = cframe.flux[iobjs, :] * np.sqrt(
                    cframe.ivar[iobjs, :]) / np.sqrt(dwave)
                # Save
                fdict['waves'].append(cframe.wave)
                fdict['s2n'].append(s2n)
                fdict['fluxes'].append(sps_tab['MAG'][cframe.fibers[iobjs]])
                if objtype == 'ELG':
                    fdict['OII'].append(
                        sps_tab['OIIFLUX'][cframe.fibers[iobjs]])
                fdict['exptime'].append(cframe.meta['EXPTIME'])
    # Return
    return fdict
def main(args):
    """Plot modeled vs. measured sky-fiber inverse variance for one frame.

    Fits a single scale factor ``alpha`` on the read-noise variance so that
    ``alpha*rd_var + sky_var`` best matches the frame IVAR over all sky
    fibers, then plots the comparison for the first four sky fibers.
    """
    log = get_logger()

    frame = read_frame(args.infile, skip_resolution=True)
    fibermap = read_fibermap(args.infile)
    fiberflat = read_fiberflat(args.fiberflat)
    skymodel = read_sky(args.sky)
    fluxcalib = read_flux_calibration(args.calib)

    # Camera/band parsed from the filename, e.g. .../frame-b0-000123.fits
    cam = args.infile.split('/')[-1].split('-')[1]
    band = cam[0]
    bands = [band]

    # Indices of sky fibers.
    sky_indx = np.where(fibermap['OBJTYPE'] == 'SKY')[0]

    rd_var, sky_var = calc_var(bands, args.nea, args.psf, frame, fluxcalib,
                               fiberflat, skymodel, components=True)
    # Total variance; only used by the commented-out diagnostic plot below.
    var = calc_var(bands, args.nea, args.psf, frame, fluxcalib, fiberflat,
                   skymodel, components=False)

    def calc_alphavar(alpha):
        return alpha * rd_var[sky_indx, :] + sky_var[sky_indx, :]

    def alpha_fit(alpha):
        _var = calc_alphavar(alpha)
        ivar = 1. / _var
        X2 = (frame.ivar[sky_indx, :] - ivar)**2.
        return np.sum(X2)

    # The alpha fit is independent of the fiber index, so do it once
    # (originally recomputed identically inside the plotting loop).
    res = minimize(alpha_fit, x0=[1.])
    alpha = res.x[0]

    nsky = 4
    fig, axes = plt.subplots(1, nsky, figsize=(5 * nsky, 5))

    for i in range(nsky):
        indx = sky_indx[i]

        axes[i].plot(skymodel.wave, median_filter(frame.ivar[indx, :], 10),
                     lw=0.4, label='Sky frame IVAR', alpha=0.4)
        axes[i].plot(skymodel.wave, 1. / rd_var[indx, :], lw=0.4,
                     label='Model rd. IVAR', alpha=0.4)
        # axes[i].plot(skymodel.wave, 1./sky_var[indx,:], lw=0.4, label='Model Sky IVAR', alpha=0.4)
        # axes[i].plot(skymodel.wave, 1./var[indx,:], lw=0.4, label=r'Model IVAR', alpha=0.4)
        axes[i].plot(skymodel.wave,
                     median_filter(1. / calc_alphavar(alpha)[i, :], 10),
                     lw=0.4, label=r'$\alpha$ Model IVAR', alpha=0.4)
        axes[i].set_title(r'Fiber {:d} ($\alpha$ = {:.6f})'.format(indx, alpha))
        axes[i].set_xlabel(r'Wavelength [$AA$]')
        axes[i].set_yscale('log')
        axes[i].set_ylim(bottom=5.e-4, top=3.e-2)
        axes[i].legend(frameon=False, loc=2)

    axes[0].set_ylabel('e/A')

    # NOTE(review): `pl` (not `plt`) — presumably `import pylab as pl`
    # elsewhere in this module; confirm before changing.
    pl.show()
def main():
    """Entry point: fit and write a sky model for one DESI exposure.

    Loads the frame and fibermap, checks that at least one sky fiber falls
    in the frame's spectra range (exit code 12 otherwise), applies the
    fiberflat, computes the sky model and writes it out.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # All four inputs are mandatory paths.
    for opt, helptext in (
            ('--infile', 'path of DESI exposure frame fits file'),
            ('--fibermap', 'path of DESI exposure frame fits file'),
            ('--fiberflat', 'path of DESI fiberflat fits file'),
            ('--outfile', 'path of DESI sky fits file')):
        parser.add_argument(opt, type=str, default=None, required=True,
                            help=helptext)
    args = parser.parse_args()

    log = get_logger()
    log.info("starting")

    # Exposure frame: carries the spectra range in its header.
    frame = read_frame(args.infile)
    specmin = frame.header["SPECMIN"]
    specmax = frame.header["SPECMAX"]

    # Sky fibers must exist inside [SPECMIN, SPECMAX].
    fibermap = read_fibermap(args.fibermap)
    selection = np.where((fibermap["OBJTYPE"] == "SKY")
                         & (fibermap["FIBER"] >= specmin)
                         & (fibermap["FIBER"] <= specmax))[0]
    if selection.size == 0:
        log.error("no sky fiber in fibermap %s" % args.fibermap)
        sys.exit(12)

    fiberflat = read_fiberflat(args.fiberflat)
    apply_fiberflat(frame, fiberflat)

    skymodel = compute_sky(frame, fibermap)
    write_sky(args.outfile, skymodel, frame.header)
    log.info("successfully wrote %s" % args.outfile)
def main(args):
    """Extract spectra from a preprocessed DESI image using a 2D PSF model.

    Delegates to :func:`main_mpi` when ``--mpi`` is requested; otherwise runs
    the full single-process pipeline: read image + PSF, build the wavelength
    grid (optionally heliocentric-corrected), run ``ex2d``, assemble a Frame
    and write it to ``args.output``.
    """
    if args.mpi:
        # MPI path: hand everything over to the parallel driver.
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        return main_mpi(args, comm)

    psf_file = args.psf
    input_file = args.input
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files
    psf = load_psf(psf_file)
    img = io.read_image(input_file)

    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    # Map the spectra range onto global fiber numbers: either explicitly
    # via --fibermap-index, or derived from the camera name (500 fibers
    # per spectrograph).
    if args.fibermap_index is not None :
        fibermin = args.fibermap_index
    else :
        camera = img.meta['CAMERA'].lower()     #- b0, r1, .. z9
        spectrograph = int(camera[1])
        fibermin = spectrograph * 500 + specmin

    print('Starting {} spectra {}:{} at {}'.format(os.path.basename(input_file), specmin, specmin+nspec, time.asctime()))

    # Fibermap: explicit file, or fall back to reading it from the input
    # image file (which may legitimately not contain one).
    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
    else:
        try:
            fibermap = io.read_fibermap(args.input)
        except (AttributeError, IOError, KeyError):
            fibermap = None

    #- Trim fibermap to matching fiber range and create fibers array
    if fibermap:
        ii = np.in1d(fibermap['FIBER'], np.arange(fibermin, fibermin+nspec))
        fibermap = fibermap[ii]
        fibers = fibermap['FIBER']
    else:
        fibers = np.arange(fibermin, fibermin+nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = [float(tmp) for tmp in args.wavelength.split(',')]
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.7

    # Extraction happens in the observed (uncorrected) frame, so divide
    # the requested grid by the correction factor; it is multiplied back
    # onto the wavelengths after extraction.
    if args.heliocentric_correction :
        heliocentric_correction_factor = heliocentric_correction_multiplicative_factor(img.meta)
        wstart /= heliocentric_correction_factor
        wstop /= heliocentric_correction_factor
        dw /= heliocentric_correction_factor
    else :
        heliocentric_correction_factor = 1.

    wave = np.arange(wstart, wstop+dw/2.0, dw)
    nwave = len(wave)
    bundlesize = args.bundlesize

    #- Confirm that this PSF covers these wavelengths for these spectra
    # (5 Angstrom tolerance on each side)
    psf_wavemin = np.max(psf.wavelength(list(range(specmin, specmax)), y=0))
    psf_wavemax = np.min(psf.wavelength(list(range(specmin, specmax)), y=psf.npix_y-1))
    if psf_wavemin-5 > wstart:
        raise ValueError('Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.format(wstart, psf_wavemin))
    if psf_wavemax+5 < wstop:
        raise ValueError('Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.format(wstop, psf_wavemax))

    #- Print parameters
    print("""\
#--- Extraction Parameters ---
input:      {input}
psf:        {psf}
output:     {output}
wavelength: {wstart} - {wstop} AA steps {dw}
specmin:    {specmin}
nspec:      {nspec}
regularize: {regularize}
#-----------------------------\
""".format(input=input_file, psf=psf_file, output=args.output,
           wstart=wstart, wstop=wstop, dw=dw,
           specmin=specmin, nspec=nspec, regularize=args.regularize))

    #- The actual extraction
    results = ex2d(img.pix, img.ivar*(img.mask==0), psf, specmin, nspec, wave,
                   regularize=args.regularize, ndecorr=args.decorrelate_fibers,
                   bundlesize=bundlesize, wavesize=args.nwavestep,
                   verbose=args.verbose, full_output=True,
                   nsubbundles=args.nsubbundles, psferr=args.psferr)

    flux = results['flux']
    ivar = results['ivar']
    Rdata = results['resolution_data']
    chi2pix = results['chi2pix']

    # Flag spectra pixels dominated by masked input pixels or a bad 2D fit.
    mask = np.zeros(flux.shape, dtype=np.uint32)
    mask[results['pixmask_fraction']>0.5] |= specmask.SOMEBADPIX
    mask[results['pixmask_fraction']==1.0] |= specmask.ALLBADPIX
    mask[chi2pix>100.0] |= specmask.BAD2DFIT

    if heliocentric_correction_factor != 1 :
        #- Apply heliocentric correction factor to the wavelength
        #- without touching the spectra, that is the whole point
        wave *= heliocentric_correction_factor
        wstart *= heliocentric_correction_factor
        wstop *= heliocentric_correction_factor
        dw *= heliocentric_correction_factor
        img.meta['HELIOCOR'] = heliocentric_correction_factor

    #- Augment input image header for output
    img.meta['NSPEC'] = (nspec, 'Number of spectra')
    img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
    img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
    img.meta['WAVESTEP']= (dw, 'Wavelength step size [Angstroms]')
    img.meta['SPECTER'] = (specter.__version__, 'https://github.com/desihub/specter')
    img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
    img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

    frame = Frame(wave, flux, ivar, mask=mask, resolution_data=Rdata,
                  fibers=fibers, meta=img.meta, fibermap=fibermap,
                  chi2pix=chi2pix)

    #- Add unit
    # In specter.extract.ex2d one has flux /= dwave
    # to convert the measured total number of electrons per
    # wavelength node to an electron 'density'
    frame.meta['BUNIT'] = 'count/Angstrom'

    #- Add scores to frame
    if not args.no_scores :
        compute_and_append_frame_scores(frame,suffix="RAW")

    #- Write output
    io.write_frame(args.output, frame)

    # Optional model image of the extraction fit.
    if args.model is not None:
        from astropy.io import fits
        fits.writeto(args.model, results['modelimage'], header=frame.meta, overwrite=True)

    print('Done {} spectra {}:{} at {}'.format(os.path.basename(input_file), specmin, specmin+nspec, time.asctime()))
def main_mpi(args, comm=None, timing=None):
    """MPI-parallel spectral extraction: divide fiber bundles across ranks.

    Each rank extracts its share of bundles to per-bundle FITS files, then
    rank 0 merges them into ``args.output``. With ``comm=None`` the whole
    job runs in a single call and returns early.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed command-line options (psf, input, output, specmin, nspec,
        wavelength, barycentric_correction, model, ...).
    comm : mpi4py communicator or None
        If None, run single-process.
    timing : dict or None
        If a dict, filled in-place with phase timings on return.
    """
    freeze_iers()

    nproc = 1
    rank = 0
    if comm is not None:
        nproc = comm.size
        rank = comm.rank

    mark_start = time.time()

    log = get_logger()

    psf_file = args.psf
    input_file = args.input

    # these parameters are interpreted as the *global* spec range,
    # to be divided among processes.
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files and broadcast
    # FIXME: after we have fixed the serialization
    # of the PSF, read and broadcast here, to reduce
    # disk contention.
    img = None
    if rank == 0:
        img = io.read_image(input_file)
    if comm is not None:
        img = comm.bcast(img, root=0)

    # Every rank reads the PSF itself (see FIXME above).
    psf = load_psf(psf_file)

    mark_read_input = time.time()

    # get spectral range
    if nspec is None:
        nspec = psf.nspec

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
    else:
        try:
            fibermap = io.read_fibermap(args.input)
        except (AttributeError, IOError, KeyError):
            fibermap = None

    if fibermap is not None:
        # Trim to the requested spec range; clamp nspec if the fibermap
        # is shorter than requested.
        fibermap = fibermap[specmin:specmin + nspec]
        if nspec > len(fibermap):
            log.warning(
                "nspec {} > len(fibermap) {}; reducing nspec to {}".format(
                    nspec, len(fibermap), len(fibermap)))
            nspec = len(fibermap)
        fibers = fibermap['FIBER']
    else:
        fibers = np.arange(specmin, specmin + nspec)

    specmax = specmin + nspec

    #- Get wavelength grid from options
    if args.wavelength is not None:
        raw_wstart, raw_wstop, raw_dw = [
            float(tmp) for tmp in args.wavelength.split(',')
        ]
    else:
        raw_wstart = np.ceil(psf.wmin_all)
        raw_wstop = np.floor(psf.wmax_all)
        raw_dw = 0.7

    raw_wave = np.arange(raw_wstart, raw_wstop + raw_dw / 2.0, raw_dw)
    nwave = len(raw_wave)
    bundlesize = args.bundlesize

    # Barycentric correction factor comes from the image header when it
    # has coordinates, else from the fibermap header; otherwise fail.
    if args.barycentric_correction:
        if ('RA' in img.meta) or ('TARGTRA' in img.meta):
            barycentric_correction_factor = \
                barycentric_correction_multiplicative_factor(img.meta)
        #- Early commissioning has RA/TARGTRA in fibermap but not HDU 0
        elif fibermap is not None and \
                (('RA' in fibermap.meta) or ('TARGTRA' in fibermap.meta)):
            barycentric_correction_factor = \
                barycentric_correction_multiplicative_factor(fibermap.meta)
        else:
            msg = 'Barycentric corr requires (TARGT)RA in HDU 0 or fibermap'
            log.critical(msg)
            raise KeyError(msg)
    else:
        barycentric_correction_factor = 1.

    # Explictly define the correct wavelength values to avoid confusion of reference frame
    # If correction applied, otherwise divide by 1 and use the same raw values
    wstart = raw_wstart / barycentric_correction_factor
    wstop = raw_wstop / barycentric_correction_factor
    dw = raw_dw / barycentric_correction_factor
    wave = raw_wave / barycentric_correction_factor

    #- Confirm that this PSF covers these wavelengths for these spectra
    # (5 Angstrom tolerance on each side)
    psf_wavemin = np.max(psf.wavelength(list(range(specmin, specmax)), y=-0.5))
    psf_wavemax = np.min(
        psf.wavelength(list(range(specmin, specmax)), y=psf.npix_y - 0.5))
    if psf_wavemin - 5 > wstart:
        raise ValueError(
            'Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.
            format(wstart, psf_wavemin))
    if psf_wavemax + 5 < wstop:
        raise ValueError(
            'Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.
            format(wstop, psf_wavemax))

    if rank == 0:
        #- Print parameters
        log.info("extract: input = {}".format(input_file))
        log.info("extract: psf = {}".format(psf_file))
        log.info("extract: specmin = {}".format(specmin))
        log.info("extract: nspec = {}".format(nspec))
        log.info("extract: wavelength = {},{},{}".format(wstart, wstop, dw))
        log.info("extract: nwavestep = {}".format(args.nwavestep))
        log.info("extract: regularize = {}".format(args.regularize))

    if barycentric_correction_factor != 1.:
        img.meta['HELIOCOR'] = barycentric_correction_factor

    #- Augment input image header for output
    # Note: header records the *raw* (uncorrected) wavelength grid.
    img.meta['NSPEC'] = (nspec, 'Number of spectra')
    img.meta['WAVEMIN'] = (raw_wstart, 'First wavelength [Angstroms]')
    img.meta['WAVEMAX'] = (raw_wstop, 'Last wavelength [Angstroms]')
    img.meta['WAVESTEP'] = (raw_dw, 'Wavelength step size [Angstroms]')
    img.meta['SPECTER'] = (specter.__version__,
                           'https://github.com/desihub/specter')
    img.meta['IN_PSF'] = (io.shorten_filename(psf_file), 'Input spectral PSF')
    img.meta['IN_IMG'] = io.shorten_filename(input_file)
    depend.add_dependencies(img.meta)

    #- Check if input PSF was itself a traceshifted version of another PSF
    orig_psf = None
    if rank == 0:
        try:
            psfhdr = fits.getheader(psf_file, 'PSF')
            orig_psf = psfhdr['IN_PSF']
        except KeyError:
            #- could happen due to PSF format not having "PSF" extension,
            #- or due to PSF header not having 'IN_PSF' keyword. Either is OK
            pass

    if comm is not None:
        orig_psf = comm.bcast(orig_psf, root=0)

    if orig_psf is not None:
        img.meta['ORIG_PSF'] = orig_psf

    #- If not using MPI, use a single call to each of these and then end this function call
    # Otherwise, continue on to splitting things up for the different ranks
    if comm is None:
        _extract_and_save(img, psf, specmin, nspec, specmin, wave, raw_wave,
                          fibers, fibermap, args.output, args.model,
                          bundlesize, args, log)
        #- This is it if we aren't running MPI, so return
        return
    #else:
    #    # Continue to the MPI section, which could go under this else statment
    #    # But to save on indentation we'll just pass on to the rest of the function
    #    # since the alternative has already returned
    #    pass

    # Now we divide our spectra into bundles
    checkbundles = set()
    checkbundles.update(
        np.floor_divide(np.arange(specmin, specmax),
                        bundlesize * np.ones(nspec)).astype(int))
    bundles = sorted(checkbundles)
    nbundle = len(bundles)

    # Per-bundle first spectrum and count, clipped to [specmin, specmax).
    bspecmin = {}
    bnspec = {}
    for b in bundles:
        if specmin > b * bundlesize:
            bspecmin[b] = specmin
        else:
            bspecmin[b] = b * bundlesize
        if (b + 1) * bundlesize > specmax:
            bnspec[b] = specmax - bspecmin[b]
        else:
            bnspec[b] = bundlesize

    # Now we assign bundles to processes: the first `leftover` ranks get
    # one extra bundle each.
    mynbundle = int(nbundle // nproc)
    myfirstbundle = 0
    leftover = nbundle % nproc
    if rank < leftover:
        mynbundle += 1
        myfirstbundle = rank * mynbundle
    else:
        myfirstbundle = ((mynbundle + 1) * leftover) + \
            (mynbundle * (rank - leftover))

    # get the root output file
    outpat = re.compile(r'(.*)\.fits')
    outmat = outpat.match(args.output)
    if outmat is None:
        raise RuntimeError(
            "extraction output file should have .fits extension")
    outroot = outmat.group(1)

    outdir = os.path.normpath(os.path.dirname(outroot))
    if rank == 0:
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

    # Barrier so no rank writes before the output directory exists.
    if comm is not None:
        comm.barrier()

    mark_preparation = time.time()
    time_total_extraction = 0.0
    time_total_write_output = 0.0

    failcount = 0

    for b in range(myfirstbundle, myfirstbundle + mynbundle):
        mark_iteration_start = time.time()
        outbundle = "{}_{:02d}.fits".format(outroot, b)
        outmodel = "{}_model_{:02d}.fits".format(outroot, b)

        log.info('extract: Rank {} extracting {} spectra {}:{} at {}'.format(
            rank,
            os.path.basename(input_file),
            bspecmin[b],
            bspecmin[b] + bnspec[b],
            time.asctime(),
        ))
        sys.stdout.flush()

        #- The actual extraction
        # NOTE(review): bare `except` keeps a failed bundle from killing the
        # whole rank, but it also traps KeyboardInterrupt — consider
        # `except Exception`.
        try:
            mark_extraction = _extract_and_save(img, psf, bspecmin[b],
                                                bnspec[b], specmin, wave,
                                                raw_wave, fibers, fibermap,
                                                outbundle, outmodel,
                                                bundlesize, args, log)

            mark_write_output = time.time()

            time_total_extraction += mark_extraction - mark_iteration_start
            time_total_write_output += mark_write_output - mark_extraction
        except:
            # Log the error and increment the number of failures
            log.error(
                "extract: FAILED bundle {}, spectrum range {}:{}".format(
                    b, bspecmin[b], bspecmin[b] + bnspec[b]))
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            log.error(''.join(lines))
            failcount += 1
            sys.stdout.flush()

    if comm is not None:
        failcount = comm.allreduce(failcount)

    if failcount > 0:
        # all processes throw
        raise RuntimeError("some extraction bundles failed")

    time_merge = None
    if rank == 0:
        # Merge the per-bundle files into the final output, deleting them.
        mark_merge_start = time.time()
        mergeopts = ['--output', args.output, '--force', '--delete']
        mergeopts.extend(
            ["{}_{:02d}.fits".format(outroot, b) for b in bundles])
        mergeargs = mergebundles.parse(mergeopts)
        mergebundles.main(mergeargs)

        if args.model is not None:
            # Sum the per-bundle model images into one model image.
            model = None
            for b in bundles:
                outmodel = "{}_model_{:02d}.fits".format(outroot, b)
                if model is None:
                    model = fits.getdata(outmodel)
                else:
                    #- TODO: test and warn if models overlap for pixels with
                    #- non-zero values
                    model += fits.getdata(outmodel)
                os.remove(outmodel)
            fits.writeto(args.model, model)
        mark_merge_end = time.time()
        time_merge = mark_merge_end - mark_merge_start

    # Resolve difference timer data
    if type(timing) is dict:
        timing["read_input"] = mark_read_input - mark_start
        timing["preparation"] = mark_preparation - mark_read_input
        timing["total_extraction"] = time_total_extraction
        timing["total_write_output"] = time_total_write_output
        timing["merge"] = time_merge
def main(args):
    """Single-process spectral extraction from a preprocessed DESI image.

    Legacy variant of the extraction entry point. Fixed here: Python 2
    `raise E, msg` and `print` statements (SyntaxError on Python 3, and
    inconsistent with the rest of this file), and the astropy `clobber=`
    keyword that was removed in favor of `overwrite=`.
    """
    psf_file = args.psf
    input_file = args.input
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files
    psf = load_psf(psf_file)
    img = io.read_image(input_file)

    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    fibermin = spectrograph * psf.nspec + specmin

    print('Starting {} spectra {}:{} at {}'.format(
        os.path.basename(input_file), specmin, specmin+nspec, time.asctime()))

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin+nspec]
        fibers = fibermap['FIBER']
    else:
        fibermap = None
        fibers = np.arange(fibermin, fibermin+nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = map(float, args.wavelength.split(','))
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop+dw/2.0, dw)
    nwave = len(wave)
    bundlesize = args.bundlesize

    #- Confirm that this PSF covers these wavelengths for these spectra
    # list(range(...)) for consistency with the other extraction entry points.
    psf_wavemin = np.max(psf.wavelength(list(range(specmin, specmax)), y=0))
    psf_wavemax = np.min(psf.wavelength(list(range(specmin, specmax)), y=psf.npix_y-1))
    if psf_wavemin > wstart:
        raise ValueError('Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.format(wstart, psf_wavemin))
    if psf_wavemax < wstop:
        raise ValueError('Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.format(wstop, psf_wavemax))

    #- Print parameters
    print("""\
#--- Extraction Parameters ---
input:      {input}
psf:        {psf}
output:     {output}
wavelength: {wstart} - {wstop} AA steps {dw}
specmin:    {specmin}
nspec:      {nspec}
regularize: {regularize}
#-----------------------------\
""".format(input=input_file, psf=psf_file, output=args.output,
           wstart=wstart, wstop=wstop, dw=dw,
           specmin=specmin, nspec=nspec, regularize=args.regularize))

    #- The actual extraction
    results = ex2d(img.pix, img.ivar*(img.mask==0), psf, specmin, nspec, wave,
                   regularize=args.regularize, ndecorr=True,
                   bundlesize=bundlesize, wavesize=args.nwavestep,
                   verbose=args.verbose, full_output=True)

    flux = results['flux']
    ivar = results['ivar']
    Rdata = results['resolution_data']
    chi2pix = results['chi2pix']

    # Flag spectra pixels dominated by masked input or a bad 2D fit.
    mask = np.zeros(flux.shape, dtype=np.uint32)
    mask[results['pixmask_fraction']>0.5] |= specmask.SOMEBADPIX
    mask[results['pixmask_fraction']==1.0] |= specmask.ALLBADPIX
    mask[chi2pix>100.0] |= specmask.BAD2DFIT

    #- Augment input image header for output
    img.meta['NSPEC'] = (nspec, 'Number of spectra')
    img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
    img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
    img.meta['WAVESTEP']= (dw, 'Wavelength step size [Angstroms]')
    img.meta['SPECTER'] = (specter.__version__, 'https://github.com/desihub/specter')
    img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
    img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

    frame = Frame(wave, flux, ivar, mask=mask, resolution_data=Rdata,
                  fibers=fibers, meta=img.meta, fibermap=fibermap,
                  chi2pix=chi2pix)

    #- Write output
    io.write_frame(args.output, frame)

    if args.model is not None:
        from astropy.io import fits
        # overwrite= replaces the removed clobber= keyword (same semantics).
        fits.writeto(args.model, results['modelimage'], header=frame.meta, overwrite=True)

    print('Done {} spectra {}:{} at {}'.format(
        os.path.basename(input_file), specmin, specmin+nspec, time.asctime()))
def main() :
    """ finds the best models of all standard stars in the frame and normalize the model flux.
    Output is written to a file and will be called for calibration.
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--fiberflatexpid', type=int, help='fiberflat exposure ID')
    parser.add_argument('--fibermap', type=str, help='path of fibermap file')
    parser.add_argument('--models', type=str, help='path of spectro-photometric stellar spectra fits')
    parser.add_argument('--spectrograph', type=int, default=0, help='spectrograph number, can go 0-9')
    parser.add_argument('--outfile', type=str, help='output file for normalized stdstar model flux')
    args = parser.parse_args()

    log = get_logger()

    # Call necessary environment variables. No need if add argument to give full file path.
    if 'DESI_SPECTRO_REDUX' not in os.environ:
        raise RuntimeError('Set environment DESI_SPECTRO_REDUX. It is needed to read the needed datafiles')
    DESI_SPECTRO_REDUX = os.environ['DESI_SPECTRO_REDUX']
    PRODNAME = os.environ['PRODNAME']
    if 'DESISIM' not in os.environ:
        raise RuntimeError('Set environment DESISIM. It will be needed to read the filter transmission files for calibration')
    DESISIM = os.environ['DESISIM']   # to read the filter transmission files

    # All arguments are effectively required despite argparse not enforcing it.
    if args.fibermap is None or args.models is None or \
       args.spectrograph is None or args.outfile is None or \
       args.fiberflatexpid is None:
        log.critical('Missing a required argument')
        parser.print_help()
        sys.exit(12)

    # read Standard Stars from the fibermap file
    # returns the Fiber id, filter names and mags for the standard stars
    fiber_tbdata, fiber_header = io.read_fibermap(args.fibermap, header=True)

    #- Trim to just fibers on this spectrograph
    ii = (500 * args.spectrograph <= fiber_tbdata["FIBER"])
    ii &= (fiber_tbdata["FIBER"] < 500 * (args.spectrograph + 1))
    fiber_tbdata = fiber_tbdata[ii]

    #- Get info for the standard stars
    refStarIdx = np.where(fiber_tbdata["OBJTYPE"] == "STD")
    refFibers = fiber_tbdata["FIBER"][refStarIdx]
    refFilters = fiber_tbdata["FILTER"][refStarIdx]
    refMags = fiber_tbdata["MAG"]
    # NOTE(review): unlike FIBER/FILTER above, MAG is *not* restricted to
    # refStarIdx, so fibers["MAG"] spans all fibers and is later indexed by
    # fiber id — confirm this asymmetry is intentional.

    fibers = {"FIBER": refFibers, "FILTER": refFilters, "MAG": refMags}

    NIGHT = fiber_header['NIGHT']
    EXPID = fiber_header['EXPID']
    filters = fibers["FILTER"]

    # Redundant with the check above, but kept for defense in depth.
    if 'DESISIM' not in os.environ:
        raise RuntimeError('Set environment DESISIM. Can not find filter response files')
    basepath = DESISIM + "/data/"

    # now load all the skyfiles, framefiles, fiberflatfiles etc
    # all three channels files are simultaneously treated for model fitting
    skyfile = {}
    framefile = {}
    fiberflatfile = {}
    for i in ["b", "r", "z"]:
        camera = i + str(args.spectrograph)
        skyfile[i] = io.findfile('sky', NIGHT, EXPID, camera)
        framefile[i] = io.findfile('frame', NIGHT, EXPID, camera)
        fiberflatfile[i] = io.findfile('fiberflat', NIGHT, args.fiberflatexpid, camera)

    # Read Frames, Flats and Sky files
    frameFlux = {}
    frameIvar = {}
    frameWave = {}
    frameResolution = {}
    framehdr = {}
    fiberFlat = {}
    ivarFlat = {}
    maskFlat = {}
    meanspecFlat = {}
    waveFlat = {}
    headerFlat = {}
    sky = {}
    skyivar = {}
    skymask = {}
    skywave = {}
    skyhdr = {}

    for i in ["b", "r", "z"]:
        #arg=(night,expid,'%s%s'%(i,spectrograph))
        #- minimal code change for refactored I/O, while not taking advantage of simplified structure
        frame = io.read_frame(framefile[i])
        frameFlux[i] = frame.flux
        frameIvar[i] = frame.ivar
        frameWave[i] = frame.wave
        frameResolution[i] = frame.resolution_data
        framehdr[i] = frame.header

        ff = io.read_fiberflat(fiberflatfile[i])
        fiberFlat[i] = ff.fiberflat
        ivarFlat[i] = ff.ivar
        maskFlat[i] = ff.mask
        meanspecFlat[i] = ff.meanspec
        waveFlat[i] = ff.wave
        headerFlat[i] = ff.header

        skymodel = io.read_sky(skyfile[i])
        sky[i] = skymodel.flux
        skyivar[i] = skymodel.ivar
        skymask[i] = skymodel.mask
        skywave[i] = skymodel.wave
        skyhdr[i] = skymodel.header

    # Convolve Sky with Detector Resolution, so as to subtract from data. Convolve for all 500 specs. Subtracting sky this way should be equivalent to sky_subtract
    convolvedsky = {"b": sky["b"], "r": sky["r"], "z": sky["z"]}

    # Read the standard Star data and divide by flat and subtract sky
    stars = []
    ivars = []
    for i in fibers["FIBER"]:
        # flat and sky should have same wavelength binning as data, otherwise should be rebinned.
        # BUG FIX: the z entry previously passed the whole frameWave dict
        # instead of frameWave["z"], unlike the b and r entries.
        stars.append((i, {"b": [frameFlux["b"][i]/fiberFlat["b"][i]-convolvedsky["b"][i], frameWave["b"]],
                          "r": [frameFlux["r"][i]/fiberFlat["r"][i]-convolvedsky["r"][i], frameWave["r"]],
                          "z": [frameFlux["z"][i]/fiberFlat["z"][i]-convolvedsky["z"][i], frameWave["z"]]},
                      fibers["MAG"][i]))
        ivars.append((i, {"b": [frameIvar["b"][i]], "r": [frameIvar["r"][i, :]], "z": [frameIvar["z"][i, :]]}))

    stdwave, stdflux, templateid = io.read_stdstar_templates(args.models)

    #- Trim standard star wavelengths to just the range we need
    minwave = min([min(w) for w in frameWave.values()])
    maxwave = max([max(w) for w in frameWave.values()])
    ii = (minwave-10 < stdwave) & (stdwave < maxwave+10)
    stdwave = stdwave[ii]
    stdflux = stdflux[:, ii]

    log.info('Number of Standard Stars in this frame: {0:d}'.format(len(stars)))
    if len(stars) == 0:
        log.critical("No standard stars! Exiting")
        sys.exit(1)

    # Now for each star, find the best model and normalize.
    normflux = []
    bestModelIndex = np.arange(len(stars))
    templateID = np.arange(len(stars))
    chi2dof = np.zeros(len(stars))

    #- TODO: don't use 'l' as a variable name.  Can look like a '1'
    for k, l in enumerate(stars):
        log.info("checking best model for star {0}".format(l[0]))

        starindex = l[0]
        mags = l[2]
        filters = fibers["FILTER"][k]
        rflux = stars[k][1]["r"][0]
        bflux = stars[k][1]["b"][0]
        zflux = stars[k][1]["z"][0]
        flux = {"b": bflux, "r": rflux, "z": zflux}

        #print ivars
        rivar = ivars[k][1]["r"][0]
        bivar = ivars[k][1]["b"][0]
        zivar = ivars[k][1]["z"][0]
        ivar = {"b": bivar, "r": rivar, "z": zivar}

        resol_star = {"r": frameResolution["r"][l[0]],
                      "b": frameResolution["b"][l[0]],
                      "z": frameResolution["z"][l[0]]}

        # Now find the best Model
        bestModelIndex[k], bestmodelWave, bestModelFlux, chi2dof[k] = match_templates(
            frameWave, flux, ivar, resol_star, stdwave, stdflux)
        log.info('Star Fiber: {0}; Best Model Fiber: {1}; TemplateID: {2}; Chisq/dof: {3}'.format(
            l[0], bestModelIndex[k], templateid[bestModelIndex[k]], chi2dof[k]))

        # Normalize the best model using reported magnitude
        modelwave, normalizedflux = normalize_templates(
            stdwave, stdflux[bestModelIndex[k]], mags, filters, basepath)
        normflux.append(normalizedflux)

    # Now write the normalized flux for all best models to a file
    normflux = np.array(normflux)
    stdfibers = fibers["FIBER"]
    data = {}
    data['BESTMODEL'] = bestModelIndex
    data['CHI2DOF'] = chi2dof
    data['TEMPLATEID'] = templateid[bestModelIndex]
    norm_model_file = args.outfile
    io.write_stdstar_model(norm_model_file, normflux, stdwave, stdfibers, data)
def main(args=None):
    """Preprocess raw DESI data for one or more cameras and write preproc images.

    Parameters
    ----------
    args : None, list/tuple of str, or argparse.Namespace
        None parses sys.argv; a sequence is parsed as an argv list;
        a Namespace is used directly.
    """
    if args is None:
        args = parse()
    elif isinstance(args, (list, tuple)):
        args = parse(args)

    # Each calibration step defaults to True, can be given an explicit
    # file via --bias/--dark/--pixflat/--mask, or disabled via --no*.
    bias = True
    if args.bias:
        bias = args.bias
    if args.nobias:
        bias = False

    dark = True
    if args.dark:
        dark = args.dark
    if args.nodark:
        dark = False

    pixflat = True
    if args.pixflat:
        pixflat = args.pixflat
    if args.nopixflat:
        pixflat = False

    mask = True
    if args.mask:
        mask = args.mask
    if args.nomask:
        mask = False

    if args.cameras is None:
        args.cameras = [c + str(i) for c in 'brz' for i in range(10)]
    else:
        args.cameras = args.cameras.split(',')

    # Explicit calibration files only make sense for a single camera.
    if (args.bias is not None) or (args.pixflat is not None) or \
            (args.mask is not None) or (args.dark is not None):
        if len(args.cameras) > 1:
            raise ValueError('must use only one camera with --bias, --dark, --pixflat, --mask options')

    if (args.outfile is not None) and len(args.cameras) > 1:
        raise ValueError('must use only one camera with --outfile option')

    if args.outdir is None:
        args.outdir = os.getcwd()
        log.warning('--outdir not specified; using {}'.format(args.outdir))

    ccd_calibration_filename = None
    if args.no_ccd_calib_filename:
        ccd_calibration_filename = False
    elif args.ccd_calib_filename is not None:
        ccd_calibration_filename = args.ccd_calib_filename

    # BUG FIX: original tested `os.path.exists(fibermap)` with an undefined
    # bare name `fibermap`, raising NameError whenever --fibermap was given.
    if args.fibermap and not os.path.exists(args.fibermap):
        raise ValueError('--fibermap {} not found'.format(args.fibermap))

    # Default fibermap path: alongside the raw file, desi-*.fits.fz -> fibermap-*.fits
    if args.fibermap is None:
        datadir, infile = os.path.split(os.path.abspath(args.infile))
        fibermapfile = infile.replace('desi-', 'fibermap-').replace('.fits.fz', '.fits')
        args.fibermap = os.path.join(datadir, fibermapfile)

    if args.nofibermap:
        fibermap = None
    elif os.path.exists(args.fibermap):
        fibermap = io.read_fibermap(args.fibermap)
    else:
        log.warning('fibermap file not found; creating blank fibermap')
        fibermap = io.empty_fibermap(5000)

    for camera in args.cameras:
        try:
            img = io.read_raw(args.infile, camera,
                              bias=bias, dark=dark, pixflat=pixflat, mask=mask,
                              bkgsub=args.bkgsub,
                              nocosmic=args.nocosmic,
                              cosmics_nsig=args.cosmics_nsig,
                              cosmics_cfudge=args.cosmics_cfudge,
                              cosmics_c2fudge=args.cosmics_c2fudge,
                              ccd_calibration_filename=ccd_calibration_filename,
                              nocrosstalk=args.nocrosstalk,
                              nogain=args.nogain,
                              nodarktrail=args.nodarktrail,
                              fill_header=args.fill_header,
                              )
        except IOError:
            # Best effort: skip unreadable cameras but keep processing others.
            log.error('Error while reading or preprocessing camera {} in {}'.format(camera, args.infile))
            continue

        if args.zero_masked:
            img.pix *= (img.mask == 0)

        if args.outfile is None:
            night = img.meta['NIGHT']
            expid = img.meta['EXPID']
            outfile = io.findfile('preproc', night=night, expid=expid,
                                  camera=camera, outdir=args.outdir)
        else:
            outfile = args.outfile

        # Attach the fibermap rows for this camera's petal.
        if fibermap:
            petal_loc = int(img.camera[1])
            ii = (fibermap['PETAL_LOC'] == petal_loc)
            img.fibermap = fibermap[ii]

        io.write_image(outfile, img)
        log.info("Wrote {}".format(outfile))
def main(args):
    """Extract spectra from a preprocessed image using a 2D PSF model.

    Args:
        args: argparse.Namespace with psf, input, output, specmin, nspec,
            fibermap, wavelength, bundlesize, nwavestep, regularize, verbose.

    Side effects:
        Writes an extracted Frame to args.output.

    Raises:
        ValueError: if the requested wavelength range is not covered by the
            PSF for the requested fibers.
    """
    psf_file = args.psf
    input_file = args.input
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files
    psf = load_psf(psf_file)
    img = io.read_image(input_file)

    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    fibermin = spectrograph * psf.nspec + specmin

    print('Starting {} spectra {}:{} at {}'.format(
        os.path.basename(input_file), specmin, specmin + nspec,
        time.asctime()))

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin + nspec]
        fibers = fibermap['FIBER']
    else:
        fibermap = None
        fibers = np.arange(fibermin, fibermin + nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = map(float, args.wavelength.split(','))
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop + dw / 2.0, dw)
    nwave = len(wave)
    bundlesize = args.bundlesize

    #- Confirm that this PSF covers these wavelengths for these spectra
    psf_wavemin = np.max(psf.wavelength(range(specmin, specmax), y=0))
    psf_wavemax = np.min(
        psf.wavelength(range(specmin, specmax), y=psf.npix_y - 1))
    #- BUG FIX: the original used Python-2-only `raise ValueError, msg`
    #- statement syntax, a SyntaxError under Python 3.
    if psf_wavemin > wstart:
        raise ValueError(
            'Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.format(
                wstart, psf_wavemin))
    if psf_wavemax < wstop:
        raise ValueError(
            'Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.format(
                wstop, psf_wavemax))

    #- Print parameters (BUG FIX: was a Python-2 `print """..."""` statement)
    print("""\
#--- Extraction Parameters ---
input:      {input}
psf:        {psf}
output:     {output}
wavelength: {wstart} - {wstop} AA steps {dw}
specmin:    {specmin}
nspec:      {nspec}
regularize: {regularize}
#-----------------------------\
""".format(input=input_file, psf=psf_file, output=args.output,
           wstart=wstart, wstop=wstop, dw=dw,
           specmin=specmin, nspec=nspec, regularize=args.regularize))

    #- The actual extraction
    flux, ivar, Rdata = ex2d(img.pix, img.ivar * (img.mask == 0), psf,
                             specmin, nspec, wave,
                             regularize=args.regularize, ndecorr=True,
                             bundlesize=bundlesize, wavesize=args.nwavestep,
                             verbose=args.verbose)

    #- Augment input image header for output
    img.meta['NSPEC'] = (nspec, 'Number of spectra')
    img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
    img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
    img.meta['WAVESTEP'] = (dw, 'Wavelength step size [Angstroms]')
    img.meta['SPECTER'] = (specter.__version__, 'https://github.com/desihub/specter')
    img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
    img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

    frame = Frame(wave, flux, ivar, resolution_data=Rdata,
                  fibers=fibers, meta=img.meta, fibermap=fibermap)

    #- Write output
    io.write_frame(args.output, frame)

    print('Done {} spectra {}:{} at {}'.format(os.path.basename(input_file),
                                               specmin, specmin + nspec,
                                               time.asctime()))
def graph_night(rawdir, rawnight):
    """Build the dependency graph of pipeline products for one night.

    Args:
        rawdir (str): raw data directory.
        rawnight (str): the night (YEARMMDD) to process.

    Returns:
        tuple: (grph, expcount, allbricks) where grph maps node name ->
            node dict (with 'type', 'in', 'out', ...), expcount counts
            exposures per flavor, and allbricks maps brick name -> number
            of targets.
    """
    grph = {}

    # Root node for the night itself.
    node = {}
    node['type'] = 'night'
    node['in'] = []
    node['out'] = []
    grph[rawnight] = node

    allbricks = {}

    expcount = {}
    expcount['flat'] = 0
    expcount['arc'] = 0
    expcount['science'] = 0

    # First, insert raw data into the graph.  We use the existence of the raw
    # data as a filter over spectrographs.  Spectrographs whose raw data do
    # not exist are excluded from the graph.

    expid = io.get_exposures(rawnight, raw=True, rawdata_dir=rawdir)

    campat = re.compile(r'([brz])([0-9])')

    keepspec = set()

    for ex in sorted(expid):
        # get the fibermap for this exposure
        fibermap = io.get_raw_files("fibermap", rawnight, ex,
                                    rawdata_dir=rawdir)

        # read the fibermap to get the exposure type, and while we are at it,
        # also accumulate the total list of bricks

        fmdata, fmheader = io.read_fibermap(fibermap, header=True)
        flavor = fmheader['flavor']
        fmbricks = {}
        for fmb in fmdata['BRICKNAME']:
            if len(fmb) > 0:
                if fmb in fmbricks.keys():
                    fmbricks[fmb] += 1
                else:
                    fmbricks[fmb] = 1
        for fmb in fmbricks.keys():
            if fmb in allbricks.keys():
                allbricks[fmb] += fmbricks[fmb]
            else:
                allbricks[fmb] = fmbricks[fmb]

        if flavor == 'arc':
            expcount['arc'] += 1
        elif flavor == 'flat':
            expcount['flat'] += 1
        else:
            expcount['science'] += 1

        node = {}
        node['type'] = 'fibermap'
        node['id'] = ex
        node['flavor'] = flavor
        node['bricks'] = fmbricks
        node['in'] = [rawnight]
        node['out'] = []
        name = graph_name(rawnight, "fibermap-{:08d}".format(ex))

        grph[name] = node
        grph[rawnight]['out'].append(name)

        # get the raw exposures
        raw = io.get_raw_files("pix", rawnight, ex, rawdata_dir=rawdir)

        for cam in sorted(raw.keys()):
            cammat = campat.match(cam)
            if cammat is None:
                raise RuntimeError("invalid camera string {}".format(cam))
            band = cammat.group(1)
            spec = cammat.group(2)
            keepspec.update(spec)

            node = {}
            node['type'] = 'pix'
            node['id'] = ex
            node['band'] = band
            node['spec'] = spec
            node['flavor'] = flavor
            node['in'] = [rawnight]
            node['out'] = []
            name = graph_name(rawnight,
                              "pix-{}{}-{:08d}".format(band, spec, ex))

            grph[name] = node
            grph[rawnight]['out'].append(name)

    keep = sorted(list(keepspec))

    def _latest_flat(cam, exid):
        # Most recent flat exposure id for camera `cam` taken before
        # exposure `exid` (falls back to the first flat if none is earlier).
        flatid = None
        for fid in sorted(flatexpid[cam]):
            if flatid is None:
                flatid = fid
            elif (fid > flatid) and (fid < exid):
                flatid = fid
        return flatid

    # Now that we have added all the raw data to the graph, we work our way
    # through the processing steps.

    # This step is a placeholder, in case we want to combine information from
    # multiple flats or arcs before running bootcalib.  We mark these bootcalib
    # outputs as depending on all arcs and flats, but in reality we may just
    # use the first or last set.

    # Since each psfboot file takes multiple exposures as input, we first
    # create those nodes.

    for band in ['b', 'r', 'z']:
        for spec in keep:
            name = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
            node = {}
            node['type'] = 'psfboot'
            node['band'] = band
            node['spec'] = spec
            node['in'] = []
            node['out'] = []
            grph[name] = node

    # NOTE: all the grph.items() loops below are wrapped in list() because
    # several of them insert new keys into grph while iterating, which is a
    # RuntimeError in Python 3 (the original code relied on Python 2's
    # list-returning items()).
    for name, nd in list(grph.items()):
        if nd['type'] != 'pix':
            continue
        if (nd['flavor'] != 'flat') and (nd['flavor'] != 'arc'):
            continue
        band = nd['band']
        spec = nd['spec']
        bootname = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
        grph[bootname]['in'].append(name)
        nd['out'].append(bootname)

    # Next is full PSF estimation.  Inputs are the arc image and the bootcalib
    # output file.  We also add nodes for the combined psfs.

    for band in ['b', 'r', 'z']:
        for spec in keep:
            name = graph_name(rawnight, "psfnight-{}{}".format(band, spec))
            node = {}
            node['type'] = 'psfnight'
            node['band'] = band
            node['spec'] = spec
            node['in'] = []
            node['out'] = []
            grph[name] = node

    for name, nd in list(grph.items()):
        if nd['type'] != 'pix':
            continue
        if nd['flavor'] != 'arc':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        bootname = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
        psfname = graph_name(rawnight,
                             "psf-{}{}-{:08d}".format(band, spec, id))
        psfnightname = graph_name(rawnight,
                                  "psfnight-{}{}".format(band, spec))
        node = {}
        node['type'] = 'psf'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, bootname]
        node['out'] = [psfnightname]
        grph[psfname] = node
        grph[bootname]['out'].append(psfname)
        grph[psfnightname]['in'].append(psfname)
        nd['out'].append(psfname)

    # Now we extract the flats and science frames using the nightly psf

    for name, nd in list(grph.items()):
        if nd['type'] != 'pix':
            continue
        if nd['flavor'] == 'arc':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        flavor = nd['flavor']
        framename = graph_name(rawnight,
                               "frame-{}{}-{:08d}".format(band, spec, id))
        psfnightname = graph_name(rawnight,
                                  "psfnight-{}{}".format(band, spec))
        fmname = graph_name(rawnight, "fibermap-{:08d}".format(id))
        node = {}
        node['type'] = 'frame'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['flavor'] = flavor
        node['in'] = [name, fmname, psfnightname]
        node['out'] = []
        grph[framename] = node
        grph[psfnightname]['out'].append(framename)
        grph[fmname]['out'].append(framename)
        nd['out'].append(framename)

    # Now build the fiberflats for each flat exposure.  We keep a list of all
    # available fiberflats while we are looping over them, since we'll need
    # that in the next step to select the "most recent" fiberflat.

    flatexpid = {}
    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] != 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        flatname = graph_name(rawnight,
                              "fiberflat-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'fiberflat'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name]
        node['out'] = []
        grph[flatname] = node
        nd['out'].append(flatname)
        cam = "{}{}".format(band, spec)
        if cam not in flatexpid.keys():
            flatexpid[cam] = []
        flatexpid[cam].append(id)

    # To compute the sky file, we use the "most recent fiberflat" that came
    # before the current exposure.

    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _latest_flat(cam, id)
        skyname = graph_name(rawnight,
                             "sky-{}{}-{:08d}".format(band, spec, id))
        # BUG FIX: the original formatted the fiberflat name with the loop
        # leftover `fid` (always the *last* flat id) instead of the selected
        # `flatid`, so the "most recent flat" computation was dead code.
        flatname = graph_name(rawnight,
                              "fiberflat-{}{}-{:08d}".format(band, spec,
                                                             flatid))
        node = {}
        node['type'] = 'sky'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname]
        node['out'] = []
        grph[skyname] = node
        nd['out'].append(skyname)
        grph[flatname]['out'].append(skyname)

    # Construct the standard star files.  These are one per spectrograph,
    # and depend on the frames and the corresponding flats and sky files.

    stdgrph = {}

    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']

        starname = graph_name(rawnight,
                              "stdstars-{}-{:08d}".format(spec, id))
        # does this spectrograph exist yet in the graph?
        if starname not in stdgrph.keys():
            fmname = graph_name(rawnight, "fibermap-{:08d}".format(id))
            grph[fmname]['out'].append(starname)
            node = {}
            node['type'] = 'stdstars'
            node['spec'] = spec
            node['id'] = id
            node['in'] = [fmname]
            node['out'] = []
            stdgrph[starname] = node

        cam = "{}{}".format(band, spec)
        flatid = _latest_flat(cam, id)
        # BUG FIX: `flatid` (not the loop leftover `fid`) selects the flat.
        flatname = graph_name(rawnight,
                              "fiberflat-{}{}-{:08d}".format(band, spec,
                                                             flatid))
        skyname = graph_name(rawnight,
                             "sky-{}{}-{:08d}".format(band, spec, id))

        stdgrph[starname]['in'].extend([skyname, name, flatname])

        nd['out'].append(starname)
        grph[flatname]['out'].append(starname)
        grph[skyname]['out'].append(starname)

    grph.update(stdgrph)

    # Construct calibration files

    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _latest_flat(cam, id)
        skyname = graph_name(rawnight,
                             "sky-{}{}-{:08d}".format(band, spec, id))
        starname = graph_name(rawnight,
                              "stdstars-{}-{:08d}".format(spec, id))
        # BUG FIX: use the selected `flatid`, not the loop leftover `fid`.
        flatname = graph_name(rawnight,
                              "fiberflat-{}{}-{:08d}".format(band, spec,
                                                             flatid))
        calname = graph_name(rawnight,
                             "calib-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'calib'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname, skyname, starname]
        node['out'] = []
        grph[calname] = node
        grph[flatname]['out'].append(calname)
        grph[skyname]['out'].append(calname)
        grph[starname]['out'].append(calname)
        nd['out'].append(calname)

    # Build cframe files

    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _latest_flat(cam, id)
        skyname = graph_name(rawnight,
                             "sky-{}{}-{:08d}".format(band, spec, id))
        # BUG FIX: use the selected `flatid`, not the loop leftover `fid`.
        flatname = graph_name(rawnight,
                              "fiberflat-{}{}-{:08d}".format(band, spec,
                                                             flatid))
        calname = graph_name(rawnight,
                             "calib-{}{}-{:08d}".format(band, spec, id))
        cfname = graph_name(rawnight,
                            "cframe-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'cframe'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname, skyname, calname]
        node['out'] = []
        grph[cfname] = node
        grph[flatname]['out'].append(cfname)
        grph[skyname]['out'].append(cfname)
        grph[calname]['out'].append(cfname)
        nd['out'].append(cfname)

    # Brick / Zbest dependencies

    for b in allbricks.keys():
        zbname = "zbest-{}".format(b)
        inb = []
        for band in ['b', 'r', 'z']:
            node = {}
            node['type'] = 'brick'
            node['brick'] = b
            node['band'] = band
            node['in'] = []
            node['out'] = [zbname]
            bname = "brick-{}-{}".format(band, b)
            inb.append(bname)
            grph[bname] = node
        node = {}
        node['type'] = 'zbest'
        node['brick'] = b
        node['ntarget'] = allbricks[b]
        node['in'] = inb
        node['out'] = []
        grph[zbname] = node

    for name, nd in list(grph.items()):
        if nd['type'] != 'fibermap':
            continue
        if nd['flavor'] == 'arc':
            continue
        if nd['flavor'] == 'flat':
            continue
        id = nd['id']
        bricks = nd['bricks']
        for band in ['b', 'r', 'z']:
            for spec in keep:
                cfname = graph_name(
                    rawnight, "cframe-{}{}-{:08d}".format(band, spec, id))
                for b in bricks:
                    bname = "brick-{}-{}".format(band, b)
                    grph[bname]['in'].append(cfname)
                    grph[cfname]['out'].append(bname)

    return (grph, expcount, allbricks)
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails
    """
    import argparse
    # NOTE(review): this parses the *process* command line (sys.argv), not a
    # parameter, so calling integration_test() programmatically picks up
    # whatever flags the host script was invoked with.
    parser = argparse.ArgumentParser(usage = "{prog} [options]")
    # parser.add_argument("-i", "--input", type=str, help="input data")
    # parser.add_argument("-o", "--output", type=str, help="output data")
    parser.add_argument("--skip-psf", action="store_true", help="Skip PSF fitting step")
    args = parser.parse_args()

    log = logging.get_logger()

    # YEARMMDD string, rolls over at noon not midnight
    if night is None:
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # raw and production locations
    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # create production
    if clobber and os.path.isdir(proddir):
        shutil.rmtree(proddir)
    dbfile = io.get_pipe_database()
    if not os.path.exists(dbfile):
        # shell out to create the production database in place
        com = "desi_pipe create --db-sqlite"
        log.info('Running {}'.format(com))
        sp.check_call(com, shell=True)
    else:
        log.info("Using pre-existing production database {}".format(dbfile))

    # Modify options file to restrict the spectral range
    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.prod.yaml_read(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    opts['psf']['specmin'] = 0
    opts['psf']['nspec'] = nspec
    opts['traceshift']['nfibers'] = nspec
    pipe.prod.yaml_write(optpath, opts)

    if args.skip_psf:
        #- Copy desimodel psf into this production instead of fitting psf
        import shutil
        for channel in ['b', 'r', 'z']:
            refpsf = '{}/data/specpsf/psf-{}.fits'.format(
                os.getenv('DESIMODEL'), channel)
            # only spectrograph 0 is exercised by this test
            nightpsf = io.findfile('psfnight', night, camera=channel+'0')
            shutil.copy(refpsf, nightpsf)
            for expid in [0,1,2]:
                exppsf = io.findfile('psf', night, expid, camera=channel+'0')
                shutil.copy(refpsf, exppsf)

        #- Resync database to current state
        dbpath = io.get_pipe_database()
        db = pipe.load_db(dbpath, mode="w")
        db.sync(night)

    # Run the pipeline tasks in order
    from desispec.pipeline.tasks.base import default_task_chain
    for tasktype in default_task_chain:
        #- if we skip psf/psfnight/traceshift, update state prior to extractions
        if tasktype == 'traceshift' and args.skip_psf:
            # `db` only exists when args.skip_psf was set above, which is
            # also the only path that reaches this branch
            db.getready()
        run_pipeline_step(tasktype)

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)

    # expid 2 is the science exposure produced by sim() above
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'TRUTH')
    # TRUTH_ELG HDU may be absent when no ELGs were simulated; a bare except
    # also swallows unrelated I/O errors here (deliberate best-effort)
    try:
        elginfo = fits.getdata(simspec, 'TRUTH_ELG')
    except:
        elginfo = None

    # Map targets to healpix pixels to locate their zbest files
    from desimodel.footprint import radec2pix
    nside=64
    pixels = np.unique(radec2pix(nside, fibermap['TARGET_RA'], fibermap['TARGET_DEC']))

    # First pass: count missing zbest outputs before printing any results
    num_missing = 0
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            num_missing += 1

    if num_missing > 0:
        log.critical('{} zbest files missing'.format(num_missing))
        sys.exit(1)

    print()
    print("--------------------------------------------------")
    print("Pixel True z -> Class z zwarn")
    # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok")
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            continue

        zfx = fits.open(zfile, memmap=False)
        zbest = zfx['ZBEST'].data
        for i in range(len(zbest['Z'])):
            objtype = zbest['SPECTYPE'][i]
            z, zwarn = zbest['Z'][i], zbest['ZWARN'][i]

            # cross-match fitted redshift to the simulation truth by TARGETID
            j = np.where(fibermap['TARGETID'] == zbest['TARGETID'][i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            oiiflux = 0.0
            if truetype == 'ELG':
                # assumes elginfo is not None whenever truth contains ELGs
                k = np.where(elginfo['TARGETID'] == zbest['TARGETID'][i])[0][0]
                oiiflux = elginfo['OIIFLUX'][k]

            truez = siminfo['REDSHIFT'][j]
            # velocity error in km/s
            dv = C_LIGHT*(z-truez)/(1+truez)

            # Classify each fit as ok/OOPS; faint ELGs (low [OII] flux) are
            # allowed to fail with a warning flag
            status = None
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17:
                status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GALAXY' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GALAXY':
                    if abs(dv) < 150:
                        status = 'ok'
                    elif oiiflux < 8e-17:
                        status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
                    else:
                        status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux)
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype in ('STD', 'FSTD') and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0:<8d} {1:4s} {2:8.5f} -> {3:5s} {4:8.5f} {5:4d} - {6}'.format(
                pix, truetype, truez, objtype, z, zwarn, status))

    print("--------------------------------------------------")
def main(args): # Set up the logger if args.verbose: log = get_logger(DEBUG) else: log = get_logger() # Make sure all necessary environment variables are set DESI_SPECTRO_REDUX_DIR="./quickGen" if 'DESI_SPECTRO_REDUX' not in os.environ: log.info('DESI_SPECTRO_REDUX environment is not set.') else: DESI_SPECTRO_REDUX_DIR=os.environ['DESI_SPECTRO_REDUX'] if os.path.exists(DESI_SPECTRO_REDUX_DIR): if not os.path.isdir(DESI_SPECTRO_REDUX_DIR): raise RuntimeError("Path %s Not a directory"%DESI_SPECTRO_REDUX_DIR) else: try: os.makedirs(DESI_SPECTRO_REDUX_DIR) except: raise SPECPROD_DIR='specprod' if 'SPECPROD' not in os.environ: log.info('SPECPROD environment is not set.') else: SPECPROD_DIR=os.environ['SPECPROD'] prod_Dir=specprod_root() if os.path.exists(prod_Dir): if not os.path.isdir(prod_Dir): raise RuntimeError("Path %s Not a directory"%prod_Dir) else: try: os.makedirs(prod_Dir) except: raise # Initialize random number generator to use. np.random.seed(args.seed) random_state = np.random.RandomState(args.seed) # Derive spectrograph number from nstart if needed if args.spectrograph is None: args.spectrograph = args.nstart / 500 # Read fibermapfile to get object type, night and expid if args.fibermap: log.info("Reading fibermap file {}".format(args.fibermap)) fibermap=read_fibermap(args.fibermap) objtype = get_source_types(fibermap) stdindx=np.where(objtype=='STD') # match STD with STAR mwsindx=np.where(objtype=='MWS_STAR') # match MWS_STAR with STAR bgsindx=np.where(objtype=='BGS') # match BGS with LRG objtype[stdindx]='STAR' objtype[mwsindx]='STAR' objtype[bgsindx]='LRG' NIGHT=fibermap.meta['NIGHT'] EXPID=fibermap.meta['EXPID'] else: # Create a blank fake fibermap fibermap = empty_fibermap(args.nspec) targetids = random_state.randint(2**62, size=args.nspec) fibermap['TARGETID'] = targetids night = get_night() expid = 0 log.info("Initializing SpecSim with config {}".format(args.config)) desiparams = load_desiparams() qsim = get_simulator(args.config, num_fibers=1) if 
args.simspec: # Read the input file log.info('Reading input file {}'.format(args.simspec)) simspec = desisim.io.read_simspec(args.simspec) nspec = simspec.nspec if simspec.flavor == 'arc': log.warning("quickgen doesn't generate flavor=arc outputs") return else: wavelengths = simspec.wave spectra = simspec.flux if nspec < args.nspec: log.info("Only {} spectra in input file".format(nspec)) args.nspec = nspec else: # Initialize the output truth table. spectra = [] wavelengths = qsim.source.wavelength_out.to(u.Angstrom).value npix = len(wavelengths) truth = dict() meta = Table() truth['OBJTYPE'] = np.zeros(args.nspec, dtype=(str, 10)) truth['FLUX'] = np.zeros((args.nspec, npix)) truth['WAVE'] = wavelengths jj = list() for thisobj in set(true_objtype): ii = np.where(true_objtype == thisobj)[0] nobj = len(ii) truth['OBJTYPE'][ii] = thisobj log.info('Generating {} template'.format(thisobj)) # Generate the templates if thisobj == 'ELG': elg = desisim.templates.ELG(wave=wavelengths, add_SNeIa=args.add_SNeIa) flux, tmpwave, meta1 = elg.make_templates(nmodel=nobj, seed=args.seed, zrange=args.zrange_elg,sne_rfluxratiorange=args.sne_rfluxratiorange) elif thisobj == 'LRG': lrg = desisim.templates.LRG(wave=wavelengths, add_SNeIa=args.add_SNeIa) flux, tmpwave, meta1 = lrg.make_templates(nmodel=nobj, seed=args.seed, zrange=args.zrange_lrg,sne_rfluxratiorange=args.sne_rfluxratiorange) elif thisobj == 'QSO': qso = desisim.templates.QSO(wave=wavelengths) flux, tmpwave, meta1 = qso.make_templates(nmodel=nobj, seed=args.seed, zrange=args.zrange_qso) elif thisobj == 'BGS': bgs = desisim.templates.BGS(wave=wavelengths, add_SNeIa=args.add_SNeIa) flux, tmpwave, meta1 = bgs.make_templates(nmodel=nobj, seed=args.seed, zrange=args.zrange_bgs,rmagrange=args.rmagrange_bgs,sne_rfluxratiorange=args.sne_rfluxratiorange) elif thisobj =='STD': std = desisim.templates.STD(wave=wavelengths) flux, tmpwave, meta1 = std.make_templates(nmodel=nobj, seed=args.seed) elif thisobj == 'QSO_BAD': # use STAR 
template no color cuts star = desisim.templates.STAR(wave=wavelengths) flux, tmpwave, meta1 = star.make_templates(nmodel=nobj, seed=args.seed) elif thisobj == 'MWS_STAR' or thisobj == 'MWS': mwsstar = desisim.templates.MWS_STAR(wave=wavelengths) flux, tmpwave, meta1 = mwsstar.make_templates(nmodel=nobj, seed=args.seed) elif thisobj == 'WD': wd = desisim.templates.WD(wave=wavelengths) flux, tmpwave, meta1 = wd.make_templates(nmodel=nobj, seed=args.seed) elif thisobj == 'SKY': flux = np.zeros((nobj, npix)) meta1 = Table(dict(REDSHIFT=np.zeros(nobj, dtype=np.float32))) elif thisobj == 'TEST': flux = np.zeros((args.nspec, npix)) indx = np.where(wave>5800.0-1E-6)[0][0] ref_integrated_flux = 1E-10 ref_cst_flux_density = 1E-17 single_line = (np.arange(args.nspec)%2 == 0).astype(np.float32) continuum = (np.arange(args.nspec)%2 == 1).astype(np.float32) for spec in range(args.nspec) : flux[spec,indx] = single_line[spec]*ref_integrated_flux/np.gradient(wavelengths)[indx] # single line flux[spec] += continuum[spec]*ref_cst_flux_density # flat continuum meta1 = Table(dict(REDSHIFT=np.zeros(args.nspec, dtype=np.float32), LINE=wave[indx]*np.ones(args.nspec, dtype=np.float32), LINEFLUX=single_line*ref_integrated_flux, CONSTFLUXDENSITY=continuum*ref_cst_flux_density)) else: log.fatal('Unknown object type {}'.format(thisobj)) sys.exit(1) # Pack it in. truth['FLUX'][ii] = flux meta = vstack([meta, meta1]) jj.append(ii.tolist()) # Sanity check on units; templates currently return ergs, not 1e-17 ergs... # assert (thisobj == 'SKY') or (np.max(truth['FLUX']) < 1e-6) # Sort the metadata table. jj = sum(jj,[]) meta_new = Table() for k in range(args.nspec): index = int(np.where(np.array(jj) == k)[0]) meta_new = vstack([meta_new, meta[index]]) meta = meta_new # Add TARGETID and the true OBJTYPE to the metadata table. 
meta.add_column(Column(true_objtype, dtype=(str, 10), name='TRUE_OBJTYPE')) meta.add_column(Column(targetids, name='TARGETID')) # Rename REDSHIFT -> TRUEZ anticipating later table joins with zbest.Z meta.rename_column('REDSHIFT', 'TRUEZ') # explicitly set location on focal plane if needed to support airmass # variations when using specsim v0.5 if qsim.source.focal_xy is None: qsim.source.focal_xy = (u.Quantity(0, 'mm'), u.Quantity(100, 'mm')) # Set simulation parameters from the simspec header or desiparams bright_objects = ['bgs','mws','bright','BGS','MWS','BRIGHT_MIX'] gray_objects = ['gray','grey'] if args.simspec is None: object_type = objtype flavor = None elif simspec.flavor == 'science': object_type = None flavor = simspec.header['PROGRAM'] else: object_type = None flavor = simspec.flavor log.warning('Maybe using an outdated simspec file with flavor={}'.format(flavor)) # Set airmass if args.airmass is not None: qsim.atmosphere.airmass = args.airmass elif args.simspec and 'AIRMASS' in simspec.header: qsim.atmosphere.airmass = simspec.header['AIRMASS'] else: qsim.atmosphere.airmass = 1.25 # Science Req. 
Doc L3.3.2 # Set exptime if args.exptime is not None: qsim.observation.exposure_time = args.exptime * u.s elif args.simspec and 'EXPTIME' in simspec.header: qsim.observation.exposure_time = simspec.header['EXPTIME'] * u.s elif objtype in bright_objects: qsim.observation.exposure_time = desiparams['exptime_bright'] * u.s else: qsim.observation.exposure_time = desiparams['exptime_dark'] * u.s # Set Moon Phase if args.moon_phase is not None: qsim.atmosphere.moon.moon_phase = args.moon_phase elif args.simspec and 'MOONFRAC' in simspec.header: qsim.atmosphere.moon.moon_phase = simspec.header['MOONFRAC'] elif flavor in bright_objects or object_type in bright_objects: qsim.atmosphere.moon.moon_phase = 0.7 elif flavor in gray_objects: qsim.atmosphere.moon.moon_phase = 0.1 else: qsim.atmosphere.moon.moon_phase = 0.5 # Set Moon Zenith if args.moon_zenith is not None: qsim.atmosphere.moon.moon_zenith = args.moon_zenith * u.deg elif args.simspec and 'MOONALT' in simspec.header: qsim.atmosphere.moon.moon_zenith = simspec.header['MOONALT'] * u.deg elif flavor in bright_objects or object_type in bright_objects: qsim.atmosphere.moon.moon_zenith = 30 * u.deg elif flavor in gray_objects: qsim.atmosphere.moon.moon_zenith = 80 * u.deg else: qsim.atmosphere.moon.moon_zenith = 100 * u.deg # Set Moon - Object Angle if args.moon_angle is not None: qsim.atmosphere.moon.separation_angle = args.moon_angle * u.deg elif args.simspec and 'MOONSEP' in simspec.header: qsim.atmosphere.moon.separation_angle = simspec.header['MOONSEP'] * u.deg elif flavor in bright_objects or object_type in bright_objects: qsim.atmosphere.moon.separation_angle = 50 * u.deg elif flavor in gray_objects: qsim.atmosphere.moon.separation_angle = 60 * u.deg else: qsim.atmosphere.moon.separation_angle = 60 * u.deg # Initialize per-camera output arrays that will be saved waves, trueflux, noisyflux, obsivar, resolution, sflux = {}, {}, {}, {}, {}, {} maxbin = 0 nmax= args.nspec for camera in qsim.instrument.cameras: # Lookup 
this camera's resolution matrix and convert to the sparse # format used in desispec. R = Resolution(camera.get_output_resolution_matrix()) resolution[camera.name] = np.tile(R.to_fits_array(), [args.nspec, 1, 1]) waves[camera.name] = (camera.output_wavelength.to(u.Angstrom).value.astype(np.float32)) nwave = len(waves[camera.name]) maxbin = max(maxbin, len(waves[camera.name])) nobj = np.zeros((nmax,3,maxbin)) # object photons nsky = np.zeros((nmax,3,maxbin)) # sky photons nivar = np.zeros((nmax,3,maxbin)) # inverse variance (object+sky) cframe_observedflux = np.zeros((nmax,3,maxbin)) # calibrated object flux cframe_ivar = np.zeros((nmax,3,maxbin)) # inverse variance of calibrated object flux cframe_rand_noise = np.zeros((nmax,3,maxbin)) # random Gaussian noise to calibrated flux sky_ivar = np.zeros((nmax,3,maxbin)) # inverse variance of sky sky_rand_noise = np.zeros((nmax,3,maxbin)) # random Gaussian noise to sky only frame_rand_noise = np.zeros((nmax,3,maxbin)) # random Gaussian noise to nobj+nsky trueflux[camera.name] = np.empty((args.nspec, nwave)) # calibrated flux noisyflux[camera.name] = np.empty((args.nspec, nwave)) # observed flux with noise obsivar[camera.name] = np.empty((args.nspec, nwave)) # inverse variance of flux if args.simspec: for i in range(10): cn = camera.name + str(i) if cn in simspec.cameras: dw = np.gradient(simspec.cameras[cn].wave) break else: raise RuntimeError('Unable to find a {} camera in input simspec'.format(camera)) else: sflux = np.empty((args.nspec, npix)) #- Check if input simspec is for a continuum flat lamp instead of science #- This does not convolve to per-fiber resolution if args.simspec: if simspec.flavor == 'flat': log.info("Simulating flat lamp exposure") for i,camera in enumerate(qsim.instrument.cameras): channel = camera.name #- from simspec, b/r/z not b0/r1/z9 assert camera.output_wavelength.unit == u.Angstrom num_pixels = len(waves[channel]) phot = list() for j in range(10): cn = camera.name + str(j) if cn in 
simspec.cameras: camwave = simspec.cameras[cn].wave dw = np.gradient(camwave) phot.append(simspec.cameras[cn].phot) if len(phot) == 0: raise RuntimeError('Unable to find a {} camera in input simspec'.format(camera)) else: phot = np.vstack(phot) meanspec = resample_flux( waves[channel], camwave, np.average(phot/dw, axis=0)) fiberflat = random_state.normal(loc=1.0, scale=1.0 / np.sqrt(meanspec), size=(nspec, num_pixels)) ivar = np.tile(meanspec, [nspec, 1]) mask = np.zeros((simspec.nspec, num_pixels), dtype=np.uint32) for kk in range((args.nspec+args.nstart-1)//500+1): camera = channel+str(kk) outfile = desispec.io.findfile('fiberflat', NIGHT, EXPID, camera) start=max(500*kk,args.nstart) end=min(500*(kk+1),nmax) if (args.spectrograph <= kk): log.info("Writing files for channel:{}, spectrograph:{}, spectra:{} to {}".format(channel,kk,start,end)) ff = FiberFlat( waves[channel], fiberflat[start:end,:], ivar[start:end,:], mask[start:end,:], meanspec, header=dict(CAMERA=camera)) write_fiberflat(outfile, ff) filePath=desispec.io.findfile("fiberflat",NIGHT,EXPID,camera) log.info("Wrote file {}".format(filePath)) sys.exit(0) # Repeat the simulation for all spectra fluxunits = 1e-17 * u.erg / (u.s * u.cm ** 2 * u.Angstrom) for j in range(args.nspec): thisobjtype = objtype[j] sys.stdout.flush() if flavor == 'arc': qsim.source.update_in( 'Quickgen source {0}'.format, 'perfect', wavelengths * u.Angstrom, spectra * fluxunits) else: qsim.source.update_in( 'Quickgen source {0}'.format(j), thisobjtype.lower(), wavelengths * u.Angstrom, spectra[j, :] * fluxunits) qsim.source.update_out() qsim.simulate() qsim.generate_random_noise(random_state) for i, output in enumerate(qsim.camera_output): assert output['observed_flux'].unit == 1e17 * fluxunits # Extract the simulation results needed to create our uncalibrated # frame output file. 
num_pixels = len(output) nobj[j, i, :num_pixels] = output['num_source_electrons'][:,0] nsky[j, i, :num_pixels] = output['num_sky_electrons'][:,0] nivar[j, i, :num_pixels] = 1.0 / output['variance_electrons'][:,0] # Get results for our flux-calibrated output file. cframe_observedflux[j, i, :num_pixels] = 1e17 * output['observed_flux'][:,0] cframe_ivar[j, i, :num_pixels] = 1e-34 * output['flux_inverse_variance'][:,0] # Fill brick arrays from the results. camera = output.meta['name'] trueflux[camera][j][:] = 1e17 * output['observed_flux'][:,0] noisyflux[camera][j][:] = 1e17 * (output['observed_flux'][:,0] + output['flux_calibration'][:,0] * output['random_noise_electrons'][:,0]) obsivar[camera][j][:] = 1e-34 * output['flux_inverse_variance'][:,0] # Use the same noise realization in the cframe and frame, without any # additional noise from sky subtraction for now. frame_rand_noise[j, i, :num_pixels] = output['random_noise_electrons'][:,0] cframe_rand_noise[j, i, :num_pixels] = 1e17 * ( output['flux_calibration'][:,0] * output['random_noise_electrons'][:,0]) # The sky output file represents a model fit to ~40 sky fibers. # We reduce the variance by a factor of 25 to account for this and # give the sky an independent (Gaussian) noise realization. 
sky_ivar[j, i, :num_pixels] = 25.0 / ( output['variance_electrons'][:,0] - output['num_source_electrons'][:,0]) sky_rand_noise[j, i, :num_pixels] = random_state.normal( scale=1.0 / np.sqrt(sky_ivar[j,i,:num_pixels]),size=num_pixels) armName={"b":0,"r":1,"z":2} for channel in 'brz': #Before writing, convert from counts/bin to counts/A (as in Pixsim output) #Quicksim Default: #FLUX - input spectrum resampled to this binning; no noise added [1e-17 erg/s/cm2/s/Ang] #COUNTS_OBJ - object counts in 0.5 Ang bin #COUNTS_SKY - sky counts in 0.5 Ang bin num_pixels = len(waves[channel]) dwave=np.gradient(waves[channel]) nobj[:,armName[channel],:num_pixels]/=dwave frame_rand_noise[:,armName[channel],:num_pixels]/=dwave nivar[:,armName[channel],:num_pixels]*=dwave**2 nsky[:,armName[channel],:num_pixels]/=dwave sky_rand_noise[:,armName[channel],:num_pixels]/=dwave sky_ivar[:,armName[channel],:num_pixels]/=dwave**2 # Now write the outputs in DESI standard file system. None of the output file can have more than 500 spectra # Looping over spectrograph for ii in range((args.nspec+args.nstart-1)//500+1): start=max(500*ii,args.nstart) # first spectrum for a given spectrograph end=min(500*(ii+1),nmax) # last spectrum for the spectrograph if (args.spectrograph <= ii): camera = "{}{}".format(channel, ii) log.info("Writing files for channel:{}, spectrograph:{}, spectra:{} to {}".format(channel,ii,start,end)) num_pixels = len(waves[channel]) # Write frame file framefileName=desispec.io.findfile("frame",NIGHT,EXPID,camera) frame_flux=nobj[start:end,armName[channel],:num_pixels]+ \ nsky[start:end,armName[channel],:num_pixels] + \ frame_rand_noise[start:end,armName[channel],:num_pixels] frame_ivar=nivar[start:end,armName[channel],:num_pixels] sh1=frame_flux.shape[0] # required for slicing the resolution metric, resolusion matrix has (nspec,ndiag,wave) # for example if nstart =400, nspec=150: two spectrographs: # 400-499=> 0 spectrograph, 500-549 => 1 if (args.nstart==start): 
resol=resolution[channel][:sh1,:,:] else: resol=resolution[channel][-sh1:,:,:] # must create desispec.Frame object frame=Frame(waves[channel], frame_flux, frame_ivar,\ resolution_data=resol, spectrograph=ii, \ fibermap=fibermap[start:end], \ meta=dict(CAMERA=camera, FLAVOR=simspec.flavor) ) desispec.io.write_frame(framefileName, frame) framefilePath=desispec.io.findfile("frame",NIGHT,EXPID,camera) log.info("Wrote file {}".format(framefilePath)) if args.frameonly or simspec.flavor == 'arc': continue # Write cframe file cframeFileName=desispec.io.findfile("cframe",NIGHT,EXPID,camera) cframeFlux=cframe_observedflux[start:end,armName[channel],:num_pixels]+cframe_rand_noise[start:end,armName[channel],:num_pixels] cframeIvar=cframe_ivar[start:end,armName[channel],:num_pixels] # must create desispec.Frame object cframe = Frame(waves[channel], cframeFlux, cframeIvar, \ resolution_data=resol, spectrograph=ii, fibermap=fibermap[start:end], meta=dict(CAMERA=camera, FLAVOR=simspec.flavor) ) desispec.io.frame.write_frame(cframeFileName,cframe) cframefilePath=desispec.io.findfile("cframe",NIGHT,EXPID,camera) log.info("Wrote file {}".format(cframefilePath)) # Write sky file skyfileName=desispec.io.findfile("sky",NIGHT,EXPID,camera) skyflux=nsky[start:end,armName[channel],:num_pixels] + \ sky_rand_noise[start:end,armName[channel],:num_pixels] skyivar=sky_ivar[start:end,armName[channel],:num_pixels] skymask=np.zeros(skyflux.shape, dtype=np.uint32) # must create desispec.Sky object skymodel = SkyModel(waves[channel], skyflux, skyivar, skymask, header=dict(CAMERA=camera)) desispec.io.sky.write_sky(skyfileName, skymodel) skyfilePath=desispec.io.findfile("sky",NIGHT,EXPID,camera) log.info("Wrote file {}".format(skyfilePath)) # Write calib file calibVectorFile=desispec.io.findfile("calib",NIGHT,EXPID,camera) flux = cframe_observedflux[start:end,armName[channel],:num_pixels] phot = nobj[start:end,armName[channel],:num_pixels] calibration = np.zeros_like(phot) jj = (flux>0) 
calibration[jj] = phot[jj] / flux[jj] #- TODO: what should calibivar be? #- For now, model it as the noise of combining ~10 spectra calibivar=10/cframe_ivar[start:end,armName[channel],:num_pixels] #mask=(1/calibivar>0).astype(int)?? mask=np.zeros(calibration.shape, dtype=np.uint32) # write flux calibration fluxcalib = FluxCalib(waves[channel], calibration, calibivar, mask) write_flux_calibration(calibVectorFile, fluxcalib) calibfilePath=desispec.io.findfile("calib",NIGHT,EXPID,camera) log.info("Wrote file {}".format(calibfilePath))
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails

    Note:
        Drives the full pipeline via shell scripts generated by ``desi_pipe``
        and then compares fitted redshifts against the simulation truth,
        printing one status row per spectrum.
    """
    log = logging.get_logger()
    log.setLevel(logging.DEBUG)

    # YEARMMDD string, rolls over at noon not midnight
    # TODO: fix usage of night to be something other than today
    if night is None:
        #night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600))
        # NOTE(review): night is pinned to a fixed date instead of "today";
        # presumably a temporary workaround — confirm before relying on it.
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # create production
    # FIXME: someday run PSF estimation too...
    ### com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf"
    com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf"
    sp.check_call(com, shell=True)

    # raw and production locations
    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # Modify options file to restrict the spectral range
    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.yaml_read(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    pipe.yaml_write(optpath, opts)

    # run the generated shell scripts
    # FIXME: someday run PSF estimation too...
    # print("Running bootcalib script...")
    # com = os.path.join(proddir, "run", "scripts", "bootcalib_all.sh")
    # sp.check_call(["bash", com])

    # print("Running specex script...")
    # com = os.path.join(proddir, "run", "scripts", "specex_all.sh")
    # sp.check_call(["bash", com])

    # print("Running psfcombine script...")
    # com = os.path.join(proddir, "run", "scripts", "psfcombine_all.sh")
    # sp.check_call(["bash", com])

    # extraction through flux calibration, then regrouping, then redshifts
    com = os.path.join(proddir, "run", "scripts", "run_shell.sh")
    print("Running extraction through calibration: " + com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "spectra.sh")
    print("Running spectral regrouping: " + com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "redshift.sh")
    print("Running redshift script " + com)
    sp.check_call(["bash", com])

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)

    # Load the simulation truth for the science exposure (expid 2)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'METADATA')

    brickdirs = glob.glob(os.path.join(proddir, "bricks", "*"))
    bricks = [os.path.basename(x) for x in brickdirs]

    print()
    print("--------------------------------------------------")
    print("Brick True z -> Class z zwarn")
    # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok")
    for b in bricks:
        zbest = io.read_zbest(io.findfile('zbest', brickname=b))
        for i in range(len(zbest.z)):
            # Map the fitter's spectype labels onto simulation object types
            if zbest.spectype[i] == 'ssp_em_galaxy':
                objtype = 'GAL'
            elif zbest.spectype[i] == 'spEigenStar':
                objtype = 'STAR'
            else:
                objtype = zbest.spectype[i]

            z, zwarn = zbest.z[i], zbest.zwarn[i]

            # Match this target back to the simulation truth by TARGETID
            j = np.where(fibermap['TARGETID'] == zbest.targetid[i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            oiiflux = siminfo['OIIFLUX'][j]
            truez = siminfo['REDSHIFT'][j]
            # velocity error in km/s
            dv = 3e5 * (z - truez) / (1 + truez)
            # Grading rules: sky fibers and faint ELGs are allowed to fail
            # (zwarn > 0); otherwise require zwarn == 0 plus type agreement
            # and a per-type velocity tolerance.
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17:
                status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GAL':
                    if abs(dv) < 150 or oiiflux < 8e-17:
                        status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
                    else:
                        status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux)
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype in ('STD', 'FSTD') and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0} {1:4s} {2:8.5f} -> {3:5s} {4:8.5f} {5:4d} - {6}'.
                  format(b, truetype, truez, objtype, z, zwarn, status))

    print("--------------------------------------------------")
def main():
    """Command-line entry point: compute the flux calibration for one frame.

    Reads a frame, applies the fiberflat, subtracts the sky, matches the
    standard-star models to this spectrograph's fibers, computes the flux
    calibration, and writes it out.  Exits with status 12 when no usable
    standard stars are found or the fibermap disagrees with the models.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # All options are mandatory string-valued paths; declare them uniformly.
    required_paths = (
        ('--infile', 'path of DESI exposure frame fits file'),
        ('--fibermap', 'path of DESI exposure frame fits file'),
        ('--fiberflat', 'path of DESI fiberflat fits file'),
        ('--sky', 'path of DESI sky fits file'),
        ('--models', 'path of spetro-photometric stellar spectra fits file'),
        ('--outfile', 'path of DESI flux calbration fits file'),
    )
    for flag, helptext in required_paths:
        parser.add_argument(flag, type=str, default=None, required=True,
                            help=helptext)
    args = parser.parse_args()

    log = get_logger()

    log.info("read frame")
    frame = read_frame(args.infile)

    log.info("apply fiberflat")
    flat = read_fiberflat(args.fiberflat)
    apply_fiberflat(frame, flat)

    log.info("subtract sky")
    sky = read_sky(args.sky)
    subtract_sky(frame, sky)

    log.info("compute flux calibration")
    model_flux, model_wave, model_fibers = read_stdstar_models(args.models)

    # Keep only standard-star models whose fibers fall on this spectrograph.
    spec_lo = frame.header["SPECMIN"]
    spec_hi = frame.header["SPECMAX"]
    selected = np.where((model_fibers >= spec_lo) & (model_fibers <= spec_hi))[0]
    if selected.size == 0:
        log.error("no stellar models for this spectro")
        sys.exit(12)

    # Convert global fiber indices to frame-local indices.
    fibers = model_fibers[selected] - frame.header["SPECMIN"]
    log.info("star fibers= %s" % str(fibers))

    # Sanity check: every selected fiber must be flagged STD in the fibermap.
    fmap = read_fibermap(args.fibermap)
    bad = np.where(fmap["OBJTYPE"][fibers] != "STD")[0]
    if bad.size > 0:
        for fiber in fibers[bad]:
            log.error("inconsistency with fiber %d, OBJTYPE='%s' in fibermap"
                      % (fiber, fmap["OBJTYPE"][fiber]))
        sys.exit(12)

    fluxcalib = compute_flux_calibration(frame, fibers, model_wave, model_flux)

    # write result
    write_flux_calibration(args.outfile, fluxcalib, header=frame.header)
    log.info("successfully wrote %s" % args.outfile)
def main(args):
    """Extract spectra from a preprocessed image using a 2D PSF model.

    Args:
        args: parsed command-line namespace with at least ``psf``, ``input``,
            ``specmin``, ``nspec``, ``fibermap``, ``wavelength``,
            ``bundlesize``, ``regularize``, ``nwavestep``, ``verbose``,
            ``output`` and ``model`` attributes.

    Side effects:
        Writes the extracted frame to ``args.output`` and, when requested,
        the 2D model image to ``args.model``.

    Raises:
        ValueError: if the requested wavelength range is not covered by the
            PSF for the requested spectra.
    """
    psf_file = args.psf
    input_file = args.input
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files
    psf = load_psf(psf_file)
    img = io.read_image(input_file)

    # Default: extract every spectrum the PSF knows about
    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    # Global fiber number of the first extracted spectrum
    fibermin = spectrograph * psf.nspec + specmin

    print('Starting {} spectra {}:{} at {}'.format(
        os.path.basename(input_file), specmin, specmin + nspec,
        time.asctime()))

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin + nspec]
        fibers = fibermap['FIBER']
    else:
        fibermap = None
        fibers = np.arange(fibermin, fibermin + nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        # "wstart,wstop,dw" comma-separated triple
        wstart, wstop, dw = [float(tmp) for tmp in args.wavelength.split(',')]
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop + dw / 2.0, dw)
    nwave = len(wave)
    bundlesize = args.bundlesize

    #- Confirm that this PSF covers these wavelengths for these spectra
    psf_wavemin = np.max(psf.wavelength(list(range(specmin, specmax)), y=0))
    psf_wavemax = np.min(
        psf.wavelength(list(range(specmin, specmax)), y=psf.npix_y - 1))
    if psf_wavemin > wstart:
        raise ValueError(
            'Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.
            format(wstart, psf_wavemin))
    if psf_wavemax < wstop:
        raise ValueError(
            'Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.
            format(wstop, psf_wavemax))

    #- Print parameters
    print("""\
#--- Extraction Parameters ---
input: {input}
psf: {psf}
output: {output}
wavelength: {wstart} - {wstop} AA steps {dw}
specmin: {specmin}
nspec: {nspec}
regularize: {regularize}
#-----------------------------\
""".format(input=input_file, psf=psf_file, output=args.output,
           wstart=wstart, wstop=wstop, dw=dw, specmin=specmin,
           nspec=nspec, regularize=args.regularize))

    #- The actual extraction
    results = ex2d(img.pix, img.ivar * (img.mask == 0), psf, specmin, nspec,
                   wave, regularize=args.regularize, ndecorr=True,
                   bundlesize=bundlesize, wavesize=args.nwavestep,
                   verbose=args.verbose, full_output=True)

    flux = results['flux']
    ivar = results['ivar']
    Rdata = results['resolution_data']
    chi2pix = results['chi2pix']

    # Flag pixels where masked-input fraction or 2D fit quality is bad
    mask = np.zeros(flux.shape, dtype=np.uint32)
    mask[results['pixmask_fraction'] > 0.5] |= specmask.SOMEBADPIX
    mask[results['pixmask_fraction'] == 1.0] |= specmask.ALLBADPIX
    mask[chi2pix > 100.0] |= specmask.BAD2DFIT

    #- Augment input image header for output
    img.meta['NSPEC'] = (nspec, 'Number of spectra')
    img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
    img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
    img.meta['WAVESTEP'] = (dw, 'Wavelength step size [Angstroms]')
    img.meta['SPECTER'] = (specter.__version__,
                           'https://github.com/desihub/specter')
    img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
    img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

    frame = Frame(wave, flux, ivar, mask=mask, resolution_data=Rdata,
                  fibers=fibers, meta=img.meta, fibermap=fibermap,
                  chi2pix=chi2pix)

    #- Write output
    io.write_frame(args.output, frame, units='photon/bin')

    if args.model is not None:
        from astropy.io import fits
        fits.writeto(args.model, results['modelimage'], header=frame.meta,
                     clobber=True)

    print('Done {} spectra {}:{} at {}'.format(os.path.basename(input_file),
                                               specmin, specmin + nspec,
                                               time.asctime()))
def main_mpi(args, comm=None):
    """MPI-parallel spectral extraction: divide spectra into bundles,
    extract each bundle on its assigned rank, then merge on rank 0.

    Args:
        args: parsed command-line namespace (same attributes as ``main``:
            ``psf``, ``input``, ``specmin``, ``nspec``, ``fibermap``,
            ``wavelength``, ``bundlesize``, ``regularize``, ``nwavestep``,
            ``verbose``, ``output``, ``model``).
        comm: optional MPI communicator; when None, runs single-process.

    Side effects:
        Writes per-bundle frame files, merges them into ``args.output``,
        and optionally accumulates/writes the model image to ``args.model``.

    Raises:
        ValueError: if the wavelength range is not covered by the PSF.
        RuntimeError: if the output name lacks a .fits extension, or any
            bundle failed to extract (raised collectively on all ranks).
    """
    log = get_logger()

    psf_file = args.psf
    input_file = args.input

    # these parameters are interpreted as the *global* spec range,
    # to be divided among processes.
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files and broadcast
    # FIXME: after we have fixed the serialization
    # of the PSF, read and broadcast here, to reduce
    # disk contention.
    img = None
    if comm is None:
        img = io.read_image(input_file)
    else:
        if comm.rank == 0:
            img = io.read_image(input_file)
        img = comm.bcast(img, root=0)

    psf = load_psf(psf_file)

    # get spectral range
    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    # Global fiber number of the first extracted spectrum
    fibermin = spectrograph * psf.nspec + specmin

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin + nspec]
        fibers = fibermap['FIBER']
    else:
        fibermap = None
        fibers = np.arange(fibermin, fibermin + nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = [float(tmp) for tmp in args.wavelength.split(',')]
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop + dw / 2.0, dw)
    nwave = len(wave)

    #- Confirm that this PSF covers these wavelengths for these spectra
    psf_wavemin = np.max(psf.wavelength(list(range(specmin, specmax)), y=0))
    psf_wavemax = np.min(
        psf.wavelength(list(range(specmin, specmax)), y=psf.npix_y - 1))
    if psf_wavemin > wstart:
        raise ValueError(
            'Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.
            format(wstart, psf_wavemin))
    if psf_wavemax < wstop:
        raise ValueError(
            'Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.
            format(wstop, psf_wavemax))

    # Now we divide our spectra into bundles
    bundlesize = args.bundlesize
    checkbundles = set()
    checkbundles.update(
        np.floor_divide(np.arange(specmin, specmax),
                        bundlesize * np.ones(nspec)).astype(int))
    bundles = sorted(checkbundles)
    nbundle = len(bundles)

    # First spectrum and spectrum count for each bundle (edge bundles may
    # be partial).
    bspecmin = {}
    bnspec = {}
    for b in bundles:
        if specmin > b * bundlesize:
            bspecmin[b] = specmin
        else:
            bspecmin[b] = b * bundlesize
        if (b + 1) * bundlesize > specmax:
            bnspec[b] = specmax - bspecmin[b]
        else:
            bnspec[b] = bundlesize

    # Now we assign bundles to processes
    nproc = 1
    rank = 0
    if comm is not None:
        nproc = comm.size
        rank = comm.rank

    # Contiguous block distribution: the first `leftover` ranks take one
    # extra bundle each.
    mynbundle = int(nbundle // nproc)
    myfirstbundle = 0
    leftover = nbundle % nproc
    if rank < leftover:
        mynbundle += 1
        myfirstbundle = rank * mynbundle
    else:
        myfirstbundle = ((mynbundle + 1) * leftover) + \
            (mynbundle * (rank - leftover))

    if rank == 0:
        #- Print parameters
        log.info("extract: input = {}".format(input_file))
        log.info("extract: psf = {}".format(psf_file))
        log.info("extract: specmin = {}".format(specmin))
        log.info("extract: nspec = {}".format(nspec))
        log.info("extract: wavelength = {},{},{}".format(wstart, wstop, dw))
        log.info("extract: nwavestep = {}".format(args.nwavestep))
        log.info("extract: regularize = {}".format(args.regularize))

    # get the root output file
    outpat = re.compile(r'(.*)\.fits')
    outmat = outpat.match(args.output)
    if outmat is None:
        raise RuntimeError(
            "extraction output file should have .fits extension")
    outroot = outmat.group(1)

    outdir = os.path.normpath(os.path.dirname(outroot))
    if rank == 0:
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

    # All ranks wait until the output directory exists
    if comm is not None:
        comm.barrier()

    failcount = 0

    for b in range(myfirstbundle, myfirstbundle + mynbundle):
        outbundle = "{}_{:02d}.fits".format(outroot, b)
        outmodel = "{}_model_{:02d}.fits".format(outroot, b)

        log.info('extract: Rank {} starting {} spectra {}:{} at {}'.format(
            rank, os.path.basename(input_file),
            bspecmin[b], bspecmin[b] + bnspec[b], time.asctime(),
        ))
        sys.stdout.flush()

        #- The actual extraction
        try:
            results = ex2d(img.pix, img.ivar * (img.mask == 0), psf,
                           bspecmin[b], bnspec[b], wave,
                           regularize=args.regularize, ndecorr=True,
                           bundlesize=bundlesize, wavesize=args.nwavestep,
                           verbose=args.verbose, full_output=True)

            flux = results['flux']
            ivar = results['ivar']
            Rdata = results['resolution_data']
            chi2pix = results['chi2pix']

            # Flag pixels with masked input or poor 2D fit quality
            mask = np.zeros(flux.shape, dtype=np.uint32)
            mask[results['pixmask_fraction'] > 0.5] |= specmask.SOMEBADPIX
            mask[results['pixmask_fraction'] == 1.0] |= specmask.ALLBADPIX
            mask[chi2pix > 100.0] |= specmask.BAD2DFIT

            #- Augment input image header for output
            img.meta['NSPEC'] = (nspec, 'Number of spectra')
            img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
            img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
            img.meta['WAVESTEP'] = (dw, 'Wavelength step size [Angstroms]')
            img.meta['SPECTER'] = (specter.__version__,
                                   'https://github.com/desihub/specter')
            img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
            img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

            if fibermap is not None:
                bfibermap = fibermap[bspecmin[b] - specmin:
                                     bspecmin[b] + bnspec[b] - specmin]
            else:
                bfibermap = None

            bfibers = fibers[bspecmin[b] - specmin:
                             bspecmin[b] + bnspec[b] - specmin]

            frame = Frame(wave, flux, ivar, mask=mask, resolution_data=Rdata,
                          fibers=bfibers, meta=img.meta, fibermap=bfibermap,
                          chi2pix=chi2pix)

            #- Write output
            io.write_frame(outbundle, frame, units='photon/bin')

            if args.model is not None:
                from astropy.io import fits
                fits.writeto(outmodel, results['modelimage'],
                             header=frame.meta)

            log.info('extract: Done {} spectra {}:{} at {}'.format(
                os.path.basename(input_file), bspecmin[b],
                bspecmin[b] + bnspec[b], time.asctime()))
            sys.stdout.flush()
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit; narrow it so interrupts still propagate.
            # Log the error and increment the number of failures
            log.error(
                "extract: FAILED bundle {}, spectrum range {}:{}".format(
                    b, bspecmin[b], bspecmin[b] + bnspec[b]))
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            log.error(''.join(lines))
            failcount += 1
            sys.stdout.flush()

    if comm is not None:
        failcount = comm.allreduce(failcount)

    if failcount > 0:
        # all processes throw
        raise RuntimeError("some extraction bundles failed")

    if rank == 0:
        mergeopts = ['--output', args.output, '--force', '--delete']
        mergeopts.extend(
            ["{}_{:02d}.fits".format(outroot, b) for b in bundles])
        mergeargs = mergebundles.parse(mergeopts)
        mergebundles.main(mergeargs)

        if args.model is not None:
            # Import here too: the loop-local import above may never have
            # executed on this rank, which previously risked a NameError.
            from astropy.io import fits
            model = None
            for b in bundles:
                outmodel = "{}_model_{:02d}.fits".format(outroot, b)
                if model is None:
                    model = fits.getdata(outmodel)
                else:
                    #- TODO: test and warn if models overlap for pixels with
                    #- non-zero values
                    model += fits.getdata(outmodel)
                os.remove(outmodel)

            fits.writeto(args.model, model)
def main_mpi(args, comm=None):
    # Legacy Python 2 variant of the MPI extraction driver (uses `print`
    # statements and `raise E, msg` syntax; not valid Python 3).
    # Divides the global spectrum range into bundles, extracts each bundle
    # on its assigned MPI rank, and merges per-bundle outputs on rank 0.
    psf_file = args.psf
    input_file = args.input

    # these parameters are interpreted as the *global* spec range,
    # to be divided among processes.
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files and broadcast
    # FIXME: after we have fixed the serialization
    # of the PSF, read and broadcast here, to reduce
    # disk contention.
    img = None
    if comm is None:
        img = io.read_image(input_file)
    else:
        if comm.rank == 0:
            img = io.read_image(input_file)
        img = comm.bcast(img, root=0)

    psf = load_psf(psf_file)

    # get spectral range
    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    # global fiber number of the first extracted spectrum
    fibermin = spectrograph * psf.nspec + specmin

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin + nspec]
        fibers = fibermap['FIBER']
    else:
        # NOTE(review): unlike the newer variant, the bundle loop below
        # dereferences `fibermap[...]` unconditionally, so the fibermap-less
        # path would fail there (fibermap is None) — confirm before reuse.
        fibermap = None
        fibers = np.arange(fibermin, fibermin + nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = map(float, args.wavelength.split(','))
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop + dw / 2.0, dw)
    nwave = len(wave)

    #- Confirm that this PSF covers these wavelengths for these spectra
    psf_wavemin = np.max(psf.wavelength(range(specmin, specmax), y=0))
    psf_wavemax = np.min(
        psf.wavelength(range(specmin, specmax), y=psf.npix_y - 1))
    if psf_wavemin > wstart:
        raise ValueError, 'Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.format(
            wstart, psf_wavemin)
    if psf_wavemax < wstop:
        raise ValueError, 'Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.format(
            wstop, psf_wavemax)

    # Now we divide our spectra into bundles
    bundlesize = args.bundlesize
    checkbundles = set()
    checkbundles.update(
        np.floor_divide(np.arange(specmin, specmax),
                        bundlesize * np.ones(nspec)).astype(int))
    bundles = sorted(list(checkbundles))
    nbundle = len(bundles)

    # first spectrum and spectrum count per bundle (edges may be partial)
    bspecmin = {}
    bnspec = {}
    for b in bundles:
        if specmin > b * bundlesize:
            bspecmin[b] = specmin
        else:
            bspecmin[b] = b * bundlesize
        if (b + 1) * bundlesize > specmax:
            bnspec[b] = specmax - bspecmin[b]
        else:
            bnspec[b] = bundlesize

    # Now we assign bundles to processes
    nproc = 1
    rank = 0
    if comm is not None:
        nproc = comm.size
        rank = comm.rank

    # contiguous block distribution; first `leftover` ranks get one extra
    mynbundle = int(nbundle / nproc)
    myfirstbundle = 0
    leftover = nbundle % nproc
    if rank < leftover:
        mynbundle += 1
        myfirstbundle = rank * mynbundle
    else:
        myfirstbundle = ((mynbundle + 1) * leftover) + \
            (mynbundle * (rank - leftover))

    if rank == 0:
        #- Print parameters
        print "extract: input = {}".format(input_file)
        print "extract: psf = {}".format(psf_file)
        print "extract: specmin = {}".format(specmin)
        print "extract: nspec = {}".format(nspec)
        print "extract: wavelength = {},{},{}".format(wstart, wstop, dw)
        print "extract: nwavestep = {}".format(args.nwavestep)
        print "extract: regularize = {}".format(args.regularize)

    # get the root output file
    outpat = re.compile(r'(.*)\.fits')
    outmat = outpat.match(args.output)
    if outmat is None:
        raise RuntimeError(
            "extraction output file should have .fits extension")
    outroot = outmat.group(1)

    # NOTE(review): outdir is not normpath'ed here (the newer variant does);
    # a bare filename output gives outdir == '' and makedirs('') would fail.
    outdir = os.path.dirname(outroot)
    if rank == 0:
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

    # all ranks wait until the output directory exists
    if comm is not None:
        comm.barrier()

    failcount = 0

    for b in range(myfirstbundle, myfirstbundle + mynbundle):
        outbundle = "{}_{:02d}.fits".format(outroot, b)

        print('extract: Starting {} spectra {}:{} at {}'.format(
            os.path.basename(input_file), bspecmin[b],
            bspecmin[b] + bnspec[b], time.asctime()))

        #- The actual extraction
        try:
            flux, ivar, Rdata = ex2d(img.pix, img.ivar * (img.mask == 0),
                                     psf, bspecmin[b], bnspec[b], wave,
                                     regularize=args.regularize,
                                     ndecorr=True, bundlesize=bundlesize,
                                     wavesize=args.nwavestep,
                                     verbose=args.verbose)

            #- Augment input image header for output
            img.meta['NSPEC'] = (nspec, 'Number of spectra')
            img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
            img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
            img.meta['WAVESTEP'] = (dw, 'Wavelength step size [Angstroms]')
            img.meta['SPECTER'] = (specter.__version__,
                                   'https://github.com/desihub/specter')
            img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
            img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

            # slice this bundle's rows out of the global fibermap/fibers
            bfibermap = fibermap[bspecmin[b] - specmin:
                                 bspecmin[b] + bnspec[b] - specmin]

            bfibers = fibers[bspecmin[b] - specmin:
                             bspecmin[b] + bnspec[b] - specmin]

            frame = Frame(wave, flux, ivar, resolution_data=Rdata,
                          fibers=bfibers, meta=img.meta, fibermap=bfibermap)

            #- Write output
            io.write_frame(outbundle, frame)

            print('extract: Done {} spectra {}:{} at {}'.format(
                os.path.basename(input_file), bspecmin[b],
                bspecmin[b] + bnspec[b], time.asctime()))
        except:
            # NOTE(review): bare except counts the failure but logs nothing,
            # so the cause of a failed bundle is silently lost.
            failcount += 1

    if comm is not None:
        failcount = comm.allreduce(failcount)

    if failcount > 0:
        # all processes throw
        raise RuntimeError("some extraction bundles failed")

    # rank 0 merges the per-bundle files into the final output
    if rank == 0:
        opts = ['--output', args.output, '--force', '--delete']
        opts.extend(["{}_{:02d}.fits".format(outroot, b) for b in bundles])
        args = merge.parse(opts)
        merge.main(args)
def load_all_s2n_values(nights, channel, sub_exposures=None):
    """ Calculate S/N values for a set of spectra from an input list of nights

    Args:
        nights: list
        channel: str ('b','r','z')
        sub_exposures: optional list of exposure IDs to use instead of all
            exposures found for each night

    Returns:
        fdict: dict
          Contains all the S/N info for all nights in the given channel;
          keys are 'waves', 's2n', 'fluxes', 'exptime', 'OII', 'objtype',
          each a list with one entry per (exposure, camera) read.
    """
    fdict = dict(waves=[], s2n=[], fluxes=[], exptime=[], OII=[], objtype=[])
    for night in nights:
        if sub_exposures is not None:
            exposures = sub_exposures
        else:
            exposures = get_exposures(night)#, raw=True)
        for exposure in exposures:
            fibermap_path = findfile(filetype='fibermap', night=night,
                                     expid=exposure)
            fibermap_data = read_fibermap(fibermap_path)
            flavor = fibermap_data.meta['FLAVOR']
            # Skip calibration exposures; they carry no science spectra
            if flavor.lower() in ('arc', 'flat', 'bias'):
                log.debug('Skipping calibration {} exposure {:08d}'.format(
                    flavor, exposure))
                continue
            # Load simspec
            simspec_file = fibermap_path.replace('fibermap', 'simspec')
            log.debug('Getting truth from {}'.format(simspec_file))
            sps_hdu = fits.open(simspec_file)
            sps_tab = Table(sps_hdu['TRUTH'].data, masked=True)

            #- Get OIIFLUX from separate HDU and join
            #- Bug fix: only synthesize/merge OIIFLUX when the column is
            #- actually missing.  The previous `if A and B ... else: = 0.0`
            #- zeroed out a pre-existing OIIFLUX column whenever TRUTH_ELG
            #- was absent, clobbering real values.
            if 'OIIFLUX' not in sps_tab.colnames:
                if 'TRUTH_ELG' in sps_hdu:
                    elg_truth = Table(sps_hdu['TRUTH_ELG'].data)
                    sps_tab = join(sps_tab,
                                   elg_truth['TARGETID', 'OIIFLUX'],
                                   keys='TARGETID', join_type='left')
                else:
                    sps_tab['OIIFLUX'] = 0.0
            sps_hdu.close()

            #objs = sps_tab['TEMPLATETYPE'] == objtype
            #if np.sum(objs) == 0:
            #    continue

            # Load spectra (flux or not fluxed; should not matter)
            for ii in range(10):
                camera = channel + str(ii)
                cframe_path = findfile(filetype='cframe', night=night,
                                       expid=exposure, camera=camera)
                try:
                    log.debug('Reading from {}'.format(cframe_path))
                    cframe = read_frame(cframe_path)
                except (IOError, OSError):
                    # Missing camera files are expected; warn and move on.
                    # (log.warn is deprecated in the stdlib logging API)
                    log.warning("Cannot find file: {:s}".format(cframe_path))
                    continue
                # Calculate S/N per Ang
                dwave = cframe.wave - np.roll(cframe.wave, 1)
                dwave[0] = dwave[1]
                # Calculate
                s2n = cframe.flux * np.sqrt(cframe.ivar) / np.sqrt(dwave)
                #s2n = cframe.flux[iobjs,:] * np.sqrt(cframe.ivar[iobjs,:]) / np.sqrt(dwave)
                # Save
                fdict['objtype'].append(
                    sps_tab['TEMPLATETYPE'].data[cframe.fibers])
                fdict['waves'].append(cframe.wave)
                fdict['s2n'].append(s2n)
                fdict['fluxes'].append(sps_tab['MAG'].data[cframe.fibers])
                fdict['OII'].append(sps_tab['OIIFLUX'].data[cframe.fibers])
                fdict['exptime'].append(cframe.meta['EXPTIME'])
    # Return
    return fdict
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails
    """
    log = get_logger()
    #- YEARMMDD string, rolls over at noon not midnight
    if night is None:
        night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600))

    #- check for required environment variables
    check_env()

    #- parameter dictionary that will later be used for formatting commands
    params = dict(night=night, nspec=nspec)

    #-----
    #- Input fibermaps, spectra, and pixel-level raw data
    #- Each exposure: first create the new exposure (fibermap + simspec),
    #- then run the pixel-level simulation for cameras b0/r0/z0.
    for expid, flavor in zip([0,1,2], ['flat', 'arc', 'science']):
        cmd = "pixsim-desi --newexp {flavor} --nspec {nspec} --night {night} --expid {expid}".format(
            expid=expid, flavor=flavor, **params)
        fibermap = io.findfile('fibermap', night, expid)
        simspec = '{}/simspec-{:08d}.fits'.format(os.path.dirname(fibermap), expid)
        inputs = []
        outputs = [fibermap, simspec]
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('pixsim newexp failed for {} exposure {}'.format(flavor, expid))

        cmd = "pixsim-desi --nspec {nspec} --night {night} --expid {expid}".format(expid=expid, **params)
        inputs = [fibermap, simspec]
        outputs = list()
        for camera in ['b0', 'r0', 'z0']:
            pixfile = io.findfile('pix', night, expid, camera)
            outputs.append(pixfile)
            #- pixsim also writes a matching simpix- file next to each pix- file
            outputs.append(os.path.join(os.path.dirname(pixfile), os.path.basename(pixfile).replace('pix-', 'simpix-')))
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('pixsim failed for {} exposure {}'.format(flavor, expid))

    #-----
    #- Extract
    #- Fixed per-channel wavelength ranges as "start,stop,step" strings
    waverange = dict(
        b = "3570,5940,1.0",
        r = "5630,7740,1.0",
        z = "7440,9830,1.0",
        )
    for expid in [0,1,2]:
        for channel in ['b', 'r', 'z']:
            camera = channel+'0'
            pixfile = io.findfile('pix', night, expid, camera)
            psffile = '{}/data/specpsf/psf-{}.fits'.format(os.getenv('DESIMODEL'), channel)
            framefile = io.findfile('frame', night, expid, camera)
            cmd = "exspec -i {pix} -p {psf} --specrange 0,{nspec} -w {wave} -o {frame}".format(
                pix=pixfile, psf=psffile, wave=waverange[channel], frame=framefile, **params)
            inputs = [pixfile, psffile]
            outputs = [framefile,]
            if runcmd(cmd, inputs, outputs, clobber) != 0:
                raise RuntimeError('extraction failed for {} expid {}'.format(camera, expid))

    #-----
    #- Fiber flat (from the flat exposure, expid 0)
    expid = 0
    for channel in ['b', 'r', 'z']:
        camera = channel+"0"
        framefile = io.findfile('frame', night, expid, camera)
        fiberflat = io.findfile('fiberflat', night, expid, camera)
        cmd = "desi_compute_fiberflat.py --infile {frame} --outfile {fiberflat}".format(
            frame=framefile, fiberflat=fiberflat, **params)
        inputs = [framefile,]
        outputs = [fiberflat,]
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('fiberflat failed for '+camera)

    #-----
    #- Sky model (science exposure, expid 2, using the flat from expid 0)
    flat_expid = 0
    expid = 2
    for channel in ['b', 'r', 'z']:
        camera = channel+"0"
        framefile = io.findfile('frame', night, expid, camera)
        fibermap = io.findfile('fibermap', night, expid)
        fiberflat = io.findfile('fiberflat', night, flat_expid, camera)
        skyfile = io.findfile('sky', night, expid, camera)
        cmd="desi_compute_sky.py --infile {frame} --fibermap {fibermap} --fiberflat {fiberflat} --outfile {sky}".format(
            frame=framefile, fibermap=fibermap, fiberflat=fiberflat, sky=skyfile, **params)
        inputs = [framefile, fibermap, fiberflat]
        outputs = [skyfile, ]
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('sky model failed for '+camera)

    #-----
    #- Fit standard stars
    if 'STD_TEMPLATES' in os.environ:
        std_templates = os.getenv('STD_TEMPLATES')
    else:
        std_templates = os.getenv('DESI_ROOT')+'/spectro/templates/stellar_templates/v1.0/stdstar_templates_v1.0.fits'

    stdstarfile = io.findfile('stdstars', night, expid, spectrograph=0)
    cmd = """desi_fit_stdstars.py --spectrograph 0 \
      --fibermap {fibermap} \
      --fiberflatexpid {flat_expid} \
      --models {std_templates} --outfile {stdstars}""".format(
        flat_expid=flat_expid, fibermap=fibermap, std_templates=std_templates,
        stdstars=stdstarfile)
    inputs = [fibermap, std_templates]
    outputs = [stdstarfile,]
    if runcmd(cmd, inputs, outputs, clobber) != 0:
        raise RuntimeError('fitting stdstars failed')

    #-----
    #- Flux calibration
    for channel in ['b', 'r', 'z']:
        camera = channel+"0"
        framefile = io.findfile('frame', night, expid, camera)
        fibermap = io.findfile('fibermap', night, expid)
        fiberflat = io.findfile('fiberflat', night, flat_expid, camera)
        skyfile = io.findfile('sky', night, expid, camera)
        calibfile = io.findfile('calib', night, expid, camera)

        #- Compute flux calibration vector
        cmd = """desi_compute_fluxcalibration.py \
          --infile {frame} --fibermap {fibermap} --fiberflat {fiberflat} --sky {sky} \
          --models {stdstars} --outfile {calib}""".format(
            frame=framefile, fibermap=fibermap, fiberflat=fiberflat,
            sky=skyfile, stdstars=stdstarfile, calib=calibfile,
            )
        inputs = [framefile, fibermap, fiberflat, skyfile, stdstarfile]
        outputs = [calibfile,]
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('flux calibration failed for '+camera)

        #- Apply the flux calibration to write a cframe file
        cframefile = io.findfile('cframe', night, expid, camera)
        cmd = """desi_process_exposure.py \
          --infile {frame} --fiberflat {fiberflat} --sky {sky} --calib {calib} \
          --outfile {cframe}""".format(frame=framefile, fibermap=fibermap,
            fiberflat=fiberflat, sky=skyfile, calib=calibfile, cframe=cframefile)
        inputs = [framefile, fiberflat, skyfile, calibfile]
        outputs = [cframefile, ]
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('combining calibration steps failed for '+camera)

    #-----
    #- Bricks: regroup cframes by brick
    inputs = list()
    for camera in ['b0', 'r0', 'z0']:
        inputs.append( io.findfile('cframe', night, expid, camera) )

    outputs = list()
    fibermap = io.read_fibermap(io.findfile('fibermap', night, expid))
    bricks = set(fibermap['BRICKNAME'])
    for b in bricks:
        for channel in ['b', 'r', 'z']:
            outputs.append( io.findfile('brick', brickid=b, band=channel))

    cmd = "desi_make_bricks.py --night "+night
    if runcmd(cmd, inputs, outputs, clobber) != 0:
        raise RuntimeError('brick generation failed')

    #-----
    #- Redshifts!
    for b in bricks:
        inputs = [io.findfile('brick', brickid=b, band=channel) for channel in ['b', 'r', 'z']]
        zbestfile = io.findfile('zbest', brickid=b)
        outputs = [zbestfile, ]
        cmd = "desi_zfind.py --brick {} -o {}".format(b, zbestfile)
        if runcmd(cmd, inputs, outputs, clobber) != 0:
            raise RuntimeError('redshifts failed for brick '+b)

    #-----
    #- Did it work?
    #- (this combination of fibermap, simspec, and zbest is a pain)
    simdir = os.path.dirname(io.findfile('fibermap', night=night, expid=expid))
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'METADATA')

    print()
    print("--------------------------------------------------")
    print("Brick True z -> Class z zwarn")
    # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok")
    for b in bricks:
        zbest = io.read_zbest(io.findfile('zbest', brickid=b))
        for i in range(len(zbest.z)):
            #- Map redmonster template names onto short object classes
            if zbest.type[i] == 'ssp_em_galaxy':
                objtype = 'GAL'
            elif zbest.type[i] == 'spEigenStar':
                objtype = 'STAR'
            else:
                objtype = zbest.type[i]

            z, zwarn = zbest.z[i], zbest.zwarn[i]

            j = np.where(fibermap['TARGETID'] == zbest.targetid[i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            truez = siminfo['REDSHIFT'][j]
            #- velocity difference in km/s (3e5 ~ speed of light)
            dv = 3e5*(z-truez)/(1+truez)
            #- Grade each fit: sky fibers should fail (zwarn>0); real targets
            #- should match class and be within a per-class velocity tolerance.
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype == 'STD' and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'oops'
            else:
                status = 'oops'
            print('{0} {1:4s} {2:8.5f} -> {3:5s} {4:8.5f} {5:4d} - {6}'.format(
                b, truetype, truez, objtype, z, zwarn, status))
    print("--------------------------------------------------")
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails
    """
    import argparse
    #- FIX: argparse substitutes %(prog)s in usage strings, not {prog};
    #- the previous "{prog} [options]" printed the braces literally.
    parser = argparse.ArgumentParser(usage="%(prog)s [options]")
    # parser.add_argument("-i", "--input", type=str,  help="input data")
    # parser.add_argument("-o", "--output", type=str,  help="output data")
    parser.add_argument("--skip-psf", action="store_true", help="Skip PSF fitting step")
    #- NOTE(review): parse_args() reads sys.argv of the calling process even
    #- when this function is invoked programmatically — confirm that is intended.
    args = parser.parse_args()

    log = logging.get_logger()

    # YEARMMDD string, rolls over at noon not midnight
    #- NOTE(review): hard-coded night (not the current night as the docstring
    #- says) — presumably pinned for reproducible simulation inputs; confirm.
    if night is None:
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # raw and production locations
    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # create production
    if clobber and os.path.isdir(proddir):
        shutil.rmtree(proddir)

    dbfile = io.get_pipe_database()
    if not os.path.exists(dbfile):
        com = "desi_pipe create --db-sqlite"
        log.info('Running {}'.format(com))
        sp.check_call(com, shell=True)
    else:
        log.info("Using pre-existing production database {}".format(dbfile))

    # Modify options file to restrict the spectral range
    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.prod.yaml_read(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    opts['psf']['specmin'] = 0
    opts['psf']['nspec'] = nspec
    opts['traceshift']['nfibers'] = nspec
    pipe.prod.yaml_write(optpath, opts)

    if args.skip_psf:
        #- Copy desimodel psf into this production instead of fitting psf
        import shutil
        for channel in ['b', 'r', 'z']:
            refpsf = '{}/data/specpsf/psf-{}.fits'.format(
                os.getenv('DESIMODEL'), channel)
            nightpsf = io.findfile('psfnight', night, camera=channel + '0')
            shutil.copy(refpsf, nightpsf)
            for expid in [0, 1, 2]:
                exppsf = io.findfile('psf', night, expid, camera=channel + '0')
                shutil.copy(refpsf, exppsf)

        #- Resync database to current state
        dbpath = io.get_pipe_database()
        db = pipe.load_db(dbpath, mode="w")
        db.sync(night)

    # Run the pipeline tasks in order
    from desispec.pipeline.tasks.base import default_task_chain
    for tasktype in default_task_chain:
        #- if we skip psf/psfnight/traceshift, update state prior to extractions
        if tasktype == 'traceshift' and args.skip_psf:
            db.getready()
        run_pipeline_step(tasktype)

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'TRUTH')
    #- FIX: narrowed a bare `except:` — a missing extension raises KeyError;
    #- the bare form also swallowed KeyboardInterrupt/SystemExit.
    try:
        elginfo = fits.getdata(simspec, 'TRUTH_ELG')
    except KeyError:
        elginfo = None

    from desimodel.footprint import radec2pix
    nside = 64
    pixels = np.unique(
        radec2pix(nside, fibermap['TARGET_RA'], fibermap['TARGET_DEC']))

    #- First pass: fail loudly if any expected zbest output is missing
    num_missing = 0
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            num_missing += 1
    if num_missing > 0:
        log.critical('{} zbest files missing'.format(num_missing))
        sys.exit(1)

    print()
    print("--------------------------------------------------")
    print("Pixel True z -> Class z zwarn")
    # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok")
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            continue
        zfx = fits.open(zfile, memmap=False)
        zbest = zfx['ZBEST'].data
        for i in range(len(zbest['Z'])):
            objtype = zbest['SPECTYPE'][i]
            z, zwarn = zbest['Z'][i], zbest['ZWARN'][i]

            j = np.where(fibermap['TARGETID'] == zbest['TARGETID'][i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            oiiflux = 0.0
            if truetype == 'ELG':
                k = np.where(elginfo['TARGETID'] == zbest['TARGETID'][i])[0][0]
                oiiflux = elginfo['OIIFLUX'][k]

            truez = siminfo['REDSHIFT'][j]
            #- velocity difference in km/s
            dv = C_LIGHT * (z - truez) / (1 + truez)
            #- Grade each fit: sky should fail (zwarn>0); faint [OII] ELGs are
            #- allowed to fail; real targets must match class and velocity.
            status = None
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17:
                status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GALAXY' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GALAXY':
                    if abs(dv) < 150:
                        status = 'ok'
                    elif oiiflux < 8e-17:
                        status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
                    else:
                        status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux)
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype in ('STD', 'FSTD') and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0:<8d} {1:4s} {2:8.5f} -> {3:5s} {4:8.5f} {5:4d} - {6}'.
                  format(pix, truetype, truez, objtype, z, zwarn, status))
    print("--------------------------------------------------")
def main(args=None):
    """Entry point for desi_qproc: quick-look processing of one DESI camera.

    Reads a raw or preprocessed image, extracts spectra with a boxcar,
    and optionally applies fiberflat, sky subtraction, and an average flux
    calibration, writing the requested intermediate products.

    Args:
        args: None (parse sys.argv), a list/tuple of CLI tokens, or an
            already-parsed argparse Namespace.
    """
    if args is None:
        args = parse()
    elif isinstance(args, (list, tuple)):
        args = parse(args)

    t0 = time.time()
    log = get_logger()

    # guess if it is a preprocessed or a raw image
    hdulist = fits.open(args.image)
    is_input_preprocessed = ("IMAGE" in hdulist) & ("IVAR" in hdulist)
    primary_header = hdulist[0].header
    hdulist.close()

    if is_input_preprocessed:
        image = read_image(args.image)
    else:
        if args.camera is None:
            print(
                "ERROR: Need to specify camera to open a raw fits image (with all cameras in different fits HDUs)"
            )
            print(
                "Try adding the option '--camera xx', with xx in {brz}{0-9}, like r7, or type 'desi_qproc --help' for more options"
            )
            sys.exit(12)
        image = read_raw(args.image, args.camera,
                         fill_header=[
                             1,
                         ])

    if args.auto:
        log.debug("AUTOMATIC MODE")
        try:
            night = image.meta['NIGHT']
            #- fall back to EXPNUM when EXPID is absent (older raw data)
            if not 'EXPID' in image.meta:
                if 'EXPNUM' in image.meta:
                    log.warning('using EXPNUM {} for EXPID'.format(
                        image.meta['EXPNUM']))
                    image.meta['EXPID'] = image.meta['EXPNUM']
            expid = image.meta['EXPID']
        except KeyError as e:
            log.error(
                "Need at least NIGHT and EXPID (or EXPNUM) to run in auto mode. Retry without the --auto option."
            )
            log.error(str(e))
            sys.exit(12)

        indir = os.path.dirname(args.image)
        #- auto-discover a fibermap sitting next to the input image
        if args.fibermap is None:
            filename = '{}/fibermap-{:08d}.fits'.format(indir, expid)
            if os.path.isfile(filename):
                log.debug("auto-mode: found a fibermap, {}, using it!".format(
                    filename))
                args.fibermap = filename
        if args.output_preproc is None:
            if not is_input_preprocessed:
                args.output_preproc = '{}/preproc-{}-{:08d}.fits'.format(
                    args.auto_output_dir, args.camera.lower(), expid)
                log.debug("auto-mode: will write preproc in " +
                          args.output_preproc)
            else:
                log.debug(
                    "auto-mode: will not write preproc because input is a preprocessed image"
                )

        if args.auto_output_dir != '.':
            if not os.path.isdir(args.auto_output_dir):
                log.debug("auto-mode: creating directory " +
                          args.auto_output_dir)
                os.makedirs(args.auto_output_dir)

    if args.output_preproc is not None:
        write_image(args.output_preproc, image)

    #- CalibFinder is created lazily, only the first time it is needed
    cfinder = None

    if args.psf is None:
        if cfinder is None:
            cfinder = CalibFinder([image.meta, primary_header])
        args.psf = cfinder.findfile("PSF")
        log.info(" Using PSF {}".format(args.psf))

    tset = read_xytraceset(args.psf)

    # add fibermap
    if args.fibermap:
        if os.path.isfile(args.fibermap):
            fibermap = read_fibermap(args.fibermap)
        else:
            log.error("no fibermap file {}".format(args.fibermap))
            fibermap = None
    else:
        fibermap = None

    if "OBSTYPE" in image.meta:
        obstype = image.meta["OBSTYPE"].upper()
        image.meta["OBSTYPE"] = obstype  # make sure it's upper case
        qframe = None
    else:
        #- no OBSTYPE: do a quick extraction and classify the frame content
        log.warning("No OBSTYPE keyword, trying to guess ...")
        qframe = qproc_boxcar_extraction(tset,
                                         image,
                                         width=args.width,
                                         fibermap=fibermap)
        obstype = check_qframe_flavor(
            qframe, input_flavor=image.meta["FLAVOR"]).upper()
        image.meta["OBSTYPE"] = obstype

    log.info("OBSTYPE = '{}'".format(obstype))

    if args.auto:
        # now set the things to do
        #- auto mode fills in the processing steps and output paths per OBSTYPE
        if obstype == "SKY" or obstype == "TWILIGHT" or obstype == "SCIENCE":
            args.shift_psf = True
            args.output_psf = '{}/psf-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.output_rawframe = '{}/qframe-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.apply_fiberflat = True
            args.skysub = True
            args.output_skyframe = '{}/qsky-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.fluxcalib = True
            args.outframe = '{}/qcframe-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
        elif obstype == "ARC" or obstype == "TESTARC":
            args.shift_psf = True
            args.output_psf = '{}/psf-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.output_rawframe = '{}/qframe-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.compute_lsf_sigma = True
        elif obstype == "FLAT" or obstype == "TESTFLAT":
            args.shift_psf = True
            args.output_psf = '{}/psf-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.output_rawframe = '{}/qframe-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)
            args.compute_fiberflat = '{}/qfiberflat-{}-{:08d}.fits'.format(
                args.auto_output_dir, args.camera, expid)

    if args.shift_psf:
        # using the trace shift script
        if args.auto:
            options = option_list({
                "psf": args.psf,
                "image": "dummy",
                "outpsf": "dummy",
                "continuum": ((obstype == "FLAT") | (obstype == "TESTFLAT")),
                "sky": ((obstype == "SCIENCE") | (obstype == "SKY"))
            })
        else:
            options = option_list({
                "psf": args.psf,
                "image": "dummy",
                "outpsf": "dummy"
            })
        tmp_args = trace_shifts_script.parse(options=options)
        tset = trace_shifts_script.fit_trace_shifts(image=image, args=tmp_args)

    #- (re-)extract with the possibly shifted traces
    qframe = qproc_boxcar_extraction(tset,
                                     image,
                                     width=args.width,
                                     fibermap=fibermap)

    if tset.meta is not None:
        # add traceshift info in the qframe, this will be saved in the qframe header
        if qframe.meta is None:
            qframe.meta = dict()
        for k in tset.meta.keys():
            qframe.meta[k] = tset.meta[k]

    if args.output_rawframe is not None:
        write_qframe(args.output_rawframe, qframe)
        log.info("wrote raw extracted frame in {}".format(
            args.output_rawframe))

    if args.compute_lsf_sigma:
        tset = process_arc(qframe, tset, linelist=None, npoly=2, nbins=2)

    if args.output_psf is not None:
        #- copy frame header keywords not already present into the traceset
        for k in qframe.meta:
            if k not in tset.meta:
                tset.meta[k] = qframe.meta[k]
        write_xytraceset(args.output_psf, tset)

    if args.compute_fiberflat is not None:
        fiberflat = qproc_compute_fiberflat(qframe)
        #write_qframe(args.compute_fiberflat,qflat)
        write_fiberflat(args.compute_fiberflat,
                        fiberflat,
                        header=qframe.meta)
        log.info("wrote fiberflat in {}".format(args.compute_fiberflat))

    if args.apply_fiberflat or args.input_fiberflat:

        if args.input_fiberflat is None:
            if cfinder is None:
                cfinder = CalibFinder([image.meta, primary_header])
            try:
                args.input_fiberflat = cfinder.findfile("FIBERFLAT")
            except KeyError as e:
                log.error("no FIBERFLAT for this spectro config")
                sys.exit(12)
        log.info("applying fiber flat {}".format(args.input_fiberflat))
        flat = read_fiberflat(args.input_fiberflat)
        qproc_apply_fiberflat(qframe, flat)

    if args.skysub:
        log.info("sky subtraction")
        if args.output_skyframe is not None:
            #- keep the sky model and save it as its own qframe
            skyflux = qproc_sky_subtraction(qframe, return_skymodel=True)
            sqframe = QFrame(qframe.wave, skyflux, np.ones(skyflux.shape))
            write_qframe(args.output_skyframe, sqframe)
            log.info("wrote sky model in {}".format(args.output_skyframe))
        else:
            qproc_sky_subtraction(qframe)

    if args.fluxcalib:
        if cfinder is None:
            cfinder = CalibFinder([image.meta, primary_header])
        # check for flux calib
        if cfinder.haskey("FLUXCALIB"):
            fluxcalib_filename = cfinder.findfile("FLUXCALIB")
            fluxcalib = read_average_flux_calibration(fluxcalib_filename)
            log.info("read average calib in {}".format(fluxcalib_filename))
            seeing = qframe.meta["SEEING"]
            airmass = qframe.meta["AIRMASS"]
            exptime = qframe.meta["EXPTIME"]
            exposure_calib = fluxcalib.value(seeing=seeing, airmass=airmass)
            for q in range(qframe.nspec):
                #- interpolate calib to this fiber's wavelength grid; the
                #- (fiber_calib>0)/(fiber_calib+(fiber_calib==0)) form avoids
                #- division by zero while zeroing out invalid pixels
                fiber_calib = np.interp(qframe.wave[q], fluxcalib.wave,
                                        exposure_calib) * exptime
                inv_calib = (fiber_calib > 0) / (fiber_calib +
                                                 (fiber_calib == 0))
                qframe.flux[q] *= inv_calib
                qframe.ivar[q] *= fiber_calib**2 * (fiber_calib > 0)

            # add keyword in header giving the calibration factor applied at a reference wavelength
            band = qframe.meta["CAMERA"].upper()[0]
            if band == "B":
                refwave = 4500
            elif band == "R":
                refwave = 6500
            else:
                refwave = 8500
            calvalue = np.interp(refwave, fluxcalib.wave,
                                 exposure_calib) * exptime
            qframe.meta["CALWAVE"] = refwave
            qframe.meta["CALVALUE"] = calvalue
        else:
            log.error(
                "Cannot calibrate fluxes because no FLUXCALIB keywork in calibration files"
            )

    fibers = parse_fibers(args.fibers)
    if fibers is None:
        #- NOTE(review): assigns the fiber *count*, not a selection; fibers is
        #- unused afterwards, so all fibers are kept — confirm intent.
        fibers = qframe.flux.shape[0]
    else:
        ii = np.arange(qframe.fibers.size)[np.in1d(qframe.fibers, fibers)]
        if ii.size == 0:
            log.error("no such fibers in frame,")
            log.error("fibers are in range [{}:{}]".format(
                qframe.fibers[0], qframe.fibers[-1] + 1))
            sys.exit(12)
        qframe = qframe[ii]

    if args.outframe is not None:
        write_qframe(args.outframe, qframe)
        log.info("wrote {}".format(args.outframe))

    t1 = time.time()
    log.info("all done in {:3.1f} sec".format(t1 - t0))

    if args.plot:
        log.info("plotting {} spectra".format(qframe.wave.shape[0]))

        import matplotlib.pyplot as plt
        fig = plt.figure()
        for i in range(qframe.wave.shape[0]):
            #- only plot pixels with valid data
            j = (qframe.ivar[i] > 0)
            plt.plot(qframe.wave[i, j], qframe.flux[i, j])
        plt.grid()
        plt.xlabel("wavelength")
        plt.ylabel("flux")
        plt.show()
def main_mpi(args, comm=None):
    """MPI entry point for 2D spectral extraction of one input image.

    Divides the global spectral range into bundles, distributes bundles over
    MPI ranks, runs ex2d on each, writes one frame file per bundle, and
    merges them into the final output on rank 0.

    Args:
        args: parsed command-line namespace (psf, input, specmin, nspec,
            fibermap, wavelength, bundlesize, nwavestep, regularize,
            verbose, output, model).
        comm: optional MPI communicator; None means serial execution.

    Raises:
        ValueError: requested wavelength range not covered by the PSF.
        RuntimeError: bad output filename, or any extraction bundle failed.
    """
    psf_file = args.psf
    input_file = args.input

    # these parameters are interpreted as the *global* spec range,
    # to be divided among processes.
    specmin = args.specmin
    nspec = args.nspec

    #- Load input files and broadcast
    # FIXME: after we have fixed the serialization
    # of the PSF, read and broadcast here, to reduce
    # disk contention.
    img = None
    if comm is None:
        img = io.read_image(input_file)
    else:
        if comm.rank == 0:
            img = io.read_image(input_file)
        img = comm.bcast(img, root=0)

    psf = load_psf(psf_file)

    # get spectral range
    if nspec is None:
        nspec = psf.nspec
    specmax = specmin + nspec

    camera = img.meta['CAMERA'].lower()  #- b0, r1, .. z9
    spectrograph = int(camera[1])
    fibermin = spectrograph * psf.nspec + specmin

    if args.fibermap is not None:
        fibermap = io.read_fibermap(args.fibermap)
        fibermap = fibermap[fibermin:fibermin + nspec]
        fibers = fibermap['FIBER']
    else:
        fibermap = None
        fibers = np.arange(fibermin, fibermin + nspec, dtype='i4')

    #- Get wavelength grid from options
    if args.wavelength is not None:
        wstart, wstop, dw = map(float, args.wavelength.split(','))
    else:
        wstart = np.ceil(psf.wmin_all)
        wstop = np.floor(psf.wmax_all)
        dw = 0.5

    wave = np.arange(wstart, wstop + dw / 2.0, dw)

    #- Confirm that this PSF covers these wavelengths for these spectra
    #- FIX: converted Python-2-only `raise ValueError, msg` syntax to Python 3
    psf_wavemin = np.max(psf.wavelength(range(specmin, specmax), y=0))
    psf_wavemax = np.min(psf.wavelength(range(specmin, specmax), y=psf.npix_y - 1))
    if psf_wavemin > wstart:
        raise ValueError('Start wavelength {:.2f} < min wavelength {:.2f} for these fibers'.format(wstart, psf_wavemin))
    if psf_wavemax < wstop:
        raise ValueError('Stop wavelength {:.2f} > max wavelength {:.2f} for these fibers'.format(wstop, psf_wavemax))

    # Now we divide our spectra into bundles
    bundlesize = args.bundlesize
    checkbundles = set()
    checkbundles.update(np.floor_divide(np.arange(specmin, specmax),
                                        bundlesize * np.ones(nspec)).astype(int))
    bundles = sorted(checkbundles)
    nbundle = len(bundles)

    #- first spectrum and number of spectra of each (possibly partial) bundle
    bspecmin = {}
    bnspec = {}
    for b in bundles:
        if specmin > b * bundlesize:
            bspecmin[b] = specmin
        else:
            bspecmin[b] = b * bundlesize
        if (b + 1) * bundlesize > specmax:
            bnspec[b] = specmax - bspecmin[b]
        else:
            bnspec[b] = bundlesize

    # Now we assign bundles to processes
    nproc = 1
    rank = 0
    if comm is not None:
        nproc = comm.size
        rank = comm.rank

    #- contiguous block distribution; the first `leftover` ranks get one extra
    mynbundle = int(nbundle // nproc)
    myfirstbundle = 0
    leftover = nbundle % nproc
    if rank < leftover:
        mynbundle += 1
        myfirstbundle = rank * mynbundle
    else:
        myfirstbundle = ((mynbundle + 1) * leftover) + (mynbundle * (rank - leftover))

    if rank == 0:
        #- Print parameters
        #- FIX: converted Python-2 print statements to print() calls
        print("extract: input = {}".format(input_file))
        print("extract: psf = {}".format(psf_file))
        print("extract: specmin = {}".format(specmin))
        print("extract: nspec = {}".format(nspec))
        print("extract: wavelength = {},{},{}".format(wstart, wstop, dw))
        print("extract: nwavestep = {}".format(args.nwavestep))
        print("extract: regularize = {}".format(args.regularize))

    # get the root output file
    outpat = re.compile(r'(.*)\.fits')
    outmat = outpat.match(args.output)
    if outmat is None:
        raise RuntimeError("extraction output file should have .fits extension")
    outroot = outmat.group(1)

    outdir = os.path.normpath(os.path.dirname(outroot))
    if rank == 0:
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
    if comm is not None:
        comm.barrier()

    failcount = 0
    for b in range(myfirstbundle, myfirstbundle + mynbundle):
        outbundle = "{}_{:02d}.fits".format(outroot, b)
        outmodel = "{}_model_{:02d}.fits".format(outroot, b)

        print('extract: Rank {} starting {} spectra {}:{} at {}'.format(
            rank, os.path.basename(input_file),
            bspecmin[b], bspecmin[b] + bnspec[b], time.asctime(),
        ))

        #- The actual extraction
        try:
            results = ex2d(img.pix, img.ivar * (img.mask == 0), psf,
                           bspecmin[b], bnspec[b], wave,
                           regularize=args.regularize, ndecorr=True,
                           bundlesize=bundlesize, wavesize=args.nwavestep,
                           verbose=args.verbose, full_output=True)

            flux = results['flux']
            ivar = results['ivar']
            Rdata = results['resolution_data']
            chi2pix = results['chi2pix']

            #- flag spectra with bad input pixels or a poor 2D model fit
            mask = np.zeros(flux.shape, dtype=np.uint32)
            mask[results['pixmask_fraction'] > 0.5] |= specmask.SOMEBADPIX
            mask[results['pixmask_fraction'] == 1.0] |= specmask.ALLBADPIX
            mask[chi2pix > 100.0] |= specmask.BAD2DFIT

            #- Augment input image header for output
            img.meta['NSPEC'] = (nspec, 'Number of spectra')
            img.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]')
            img.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]')
            img.meta['WAVESTEP'] = (dw, 'Wavelength step size [Angstroms]')
            img.meta['SPECTER'] = (specter.__version__, 'https://github.com/desihub/specter')
            img.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF')
            img.meta['IN_IMG'] = (_trim(input_file), 'Input image')

            if fibermap is not None:
                bfibermap = fibermap[bspecmin[b] - specmin:bspecmin[b] + bnspec[b] - specmin]
            else:
                bfibermap = None

            bfibers = fibers[bspecmin[b] - specmin:bspecmin[b] + bnspec[b] - specmin]

            frame = Frame(wave, flux, ivar, mask=mask, resolution_data=Rdata,
                          fibers=bfibers, meta=img.meta, fibermap=bfibermap,
                          chi2pix=chi2pix)

            #- Write output
            io.write_frame(outbundle, frame)

            if args.model is not None:
                from astropy.io import fits
                fits.writeto(outmodel, results['modelimage'], header=frame.meta)

            print('extract: Done {} spectra {}:{} at {}'.format(
                os.path.basename(input_file),
                bspecmin[b], bspecmin[b] + bnspec[b], time.asctime()))
        except Exception:
            #- FIX: was a bare `except:` that silently swallowed the error
            #- (including KeyboardInterrupt); report it but keep going so the
            #- aggregated failcount below raises a single RuntimeError.
            import traceback
            traceback.print_exc()
            failcount += 1

    if comm is not None:
        failcount = comm.allreduce(failcount)

    if failcount > 0:
        # all processes throw
        raise RuntimeError("some extraction bundles failed")

    if rank == 0:
        #- merge the per-bundle frame files into the final output
        mergeopts = ['--output', args.output, '--force', '--delete']
        mergeopts.extend(["{}_{:02d}.fits".format(outroot, b) for b in bundles])
        mergeargs = mergebundles.parse(mergeopts)
        mergebundles.main(mergeargs)

        if args.model is not None:
            model = None
            for b in bundles:
                outmodel = "{}_model_{:02d}.fits".format(outroot, b)
                if model is None:
                    model = fits.getdata(outmodel)
                else:
                    #- TODO: test and warn if models overlap for pixels with
                    #- non-zero values
                    model += fits.getdata(outmodel)
                os.remove(outmodel)

            fits.writeto(args.model, model)
def graph_night(rawdir, rawnight):
    """Build the processing dependency graph for one night of raw data.

    The graph is a dictionary mapping object names (night, fibermap, pix,
    psfboot, psf, psfnight, frame, fiberflat, sky, stdstars, calib, cframe,
    brick, zbest) to a dictionary of node properties, including the lists of
    input ('in') and output ('out') dependencies.

    Args:
        rawdir (str): root directory of the raw data.
        rawnight (str): the YEARMMDD night string.

    Returns:
        tuple: (grph, expcount, allbricks) where grph is the dependency
            graph, expcount counts exposures per flavor ('flat', 'arc',
            'science'), and allbricks maps brick name to the number of
            targets touching that brick.
    """
    grph = {}

    # Root node representing the night itself.
    node = {}
    node['type'] = 'night'
    node['in'] = []
    node['out'] = []
    grph[rawnight] = node

    allbricks = {}

    expcount = {}
    expcount['flat'] = 0
    expcount['arc'] = 0
    expcount['science'] = 0

    # First, insert raw data into the graph.  We use the existence of the raw
    # data as a filter over spectrographs.  Spectrographs whose raw data do
    # not exist are excluded from the graph.
    expid = io.get_exposures(rawnight, raw=True, rawdata_dir=rawdir)

    campat = re.compile(r'([brz])([0-9])')

    keepspec = set()

    for ex in sorted(expid):
        # get the fibermap for this exposure
        fibermap = io.get_raw_files("fibermap", rawnight, ex,
            rawdata_dir=rawdir)

        # read the fibermap to get the exposure type, and while we are at it,
        # also accumulate the total list of bricks
        fmdata, fmheader = io.read_fibermap(fibermap, header=True)
        flavor = fmheader['flavor']
        fmbricks = {}
        for fmb in fmdata['BRICKNAME']:
            if len(fmb) > 0:
                fmbricks[fmb] = fmbricks.get(fmb, 0) + 1
        for fmb, cnt in fmbricks.items():
            allbricks[fmb] = allbricks.get(fmb, 0) + cnt

        if flavor == 'arc':
            expcount['arc'] += 1
        elif flavor == 'flat':
            expcount['flat'] += 1
        else:
            expcount['science'] += 1

        node = {}
        node['type'] = 'fibermap'
        node['id'] = ex
        node['flavor'] = flavor
        node['bricks'] = fmbricks
        node['in'] = [rawnight]
        node['out'] = []
        name = graph_name(rawnight, "fibermap-{:08d}".format(ex))
        grph[name] = node
        grph[rawnight]['out'].append(name)

        # get the raw exposures
        raw = io.get_raw_files("pix", rawnight, ex, rawdata_dir=rawdir)

        for cam in sorted(raw.keys()):
            cammat = campat.match(cam)
            if cammat is None:
                raise RuntimeError("invalid camera string {}".format(cam))
            band = cammat.group(1)
            spec = cammat.group(2)
            # FIX: add() the one-character spectrograph string as a single
            # element; set.update(spec) iterated over its characters (same
            # result only because spec is a single digit).
            keepspec.add(spec)
            node = {}
            node['type'] = 'pix'
            node['id'] = ex
            node['band'] = band
            node['spec'] = spec
            node['flavor'] = flavor
            node['in'] = [rawnight]
            node['out'] = []
            name = graph_name(rawnight,
                "pix-{}{}-{:08d}".format(band, spec, ex))
            grph[name] = node
            grph[rawnight]['out'].append(name)

    keep = sorted(keepspec)

    # Now that we have added all the raw data to the graph, we work our way
    # through the processing steps.

    # This step is a placeholder, in case we want to combine information from
    # multiple flats or arcs before running bootcalib.  We mark these
    # bootcalib outputs as depending on all arcs and flats, but in reality we
    # may just use the first or last set.

    # Since each psfboot file takes multiple exposures as input, we first
    # create those nodes.
    for band in ['b', 'r', 'z']:
        for spec in keep:
            name = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
            node = {}
            node['type'] = 'psfboot'
            node['band'] = band
            node['spec'] = spec
            node['in'] = []
            node['out'] = []
            grph[name] = node

    # Hook up every arc and flat image as an input to its psfboot file.
    for name, nd in grph.items():
        if nd['type'] != 'pix':
            continue
        if (nd['flavor'] != 'flat') and (nd['flavor'] != 'arc'):
            continue
        band = nd['band']
        spec = nd['spec']
        bootname = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
        grph[bootname]['in'].append(name)
        nd['out'].append(bootname)

    # Next is full PSF estimation.  Inputs are the arc image and the
    # bootcalib output file.  We also add nodes for the combined psfs.
    for band in ['b', 'r', 'z']:
        for spec in keep:
            name = graph_name(rawnight, "psfnight-{}{}".format(band, spec))
            node = {}
            node['type'] = 'psfnight'
            node['band'] = band
            node['spec'] = spec
            node['in'] = []
            node['out'] = []
            grph[name] = node

    # FIX: iterate over a snapshot of the items, since new psf nodes are
    # inserted into grph inside this loop (Python 3 raises RuntimeError when
    # a dict changes size during iteration).
    for name, nd in list(grph.items()):
        if nd['type'] != 'pix':
            continue
        if nd['flavor'] != 'arc':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        bootname = graph_name(rawnight, "psfboot-{}{}".format(band, spec))
        psfname = graph_name(rawnight,
            "psf-{}{}-{:08d}".format(band, spec, id))
        psfnightname = graph_name(rawnight,
            "psfnight-{}{}".format(band, spec))
        node = {}
        node['type'] = 'psf'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, bootname]
        node['out'] = [psfnightname]
        grph[psfname] = node
        grph[bootname]['out'].append(psfname)
        grph[psfnightname]['in'].append(psfname)
        nd['out'].append(psfname)

    # Now we extract the flats and science frames using the nightly psf.
    # (snapshot: frame nodes are inserted during iteration)
    for name, nd in list(grph.items()):
        if nd['type'] != 'pix':
            continue
        if nd['flavor'] == 'arc':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        flavor = nd['flavor']
        framename = graph_name(rawnight,
            "frame-{}{}-{:08d}".format(band, spec, id))
        psfnightname = graph_name(rawnight,
            "psfnight-{}{}".format(band, spec))
        fmname = graph_name(rawnight, "fibermap-{:08d}".format(id))
        node = {}
        node['type'] = 'frame'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['flavor'] = flavor
        node['in'] = [name, fmname, psfnightname]
        node['out'] = []
        grph[framename] = node
        grph[psfnightname]['out'].append(framename)
        grph[fmname]['out'].append(framename)
        nd['out'].append(framename)

    # Now build the fiberflats for each flat exposure.  We keep a list of all
    # available fiberflats while we are looping over them, since we'll need
    # that in the next step to select the "most recent" fiberflat.
    # (snapshot: fiberflat nodes are inserted during iteration)
    flatexpid = {}
    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] != 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        flatname = graph_name(rawnight,
            "fiberflat-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'fiberflat'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name]
        node['out'] = []
        grph[flatname] = node
        nd['out'].append(flatname)
        cam = "{}{}".format(band, spec)
        if cam not in flatexpid:
            flatexpid[cam] = []
        flatexpid[cam].append(id)

    def _most_recent_flatid(cam, expid_):
        # Select the latest flat exposure of camera `cam` that precedes
        # exposure `expid_`; if none precedes it, fall back to the earliest
        # flat.  Same selection as the original inline loops.
        flatid = None
        for fid in sorted(flatexpid[cam]):
            if (flatid is None) or ((fid > flatid) and (fid < expid_)):
                flatid = fid
        return flatid

    # To compute the sky file, we use the "most recent fiberflat" that came
    # before the current exposure.
    # (snapshot: sky nodes are inserted during iteration)
    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _most_recent_flatid(cam, id)
        skyname = graph_name(rawnight,
            "sky-{}{}-{:08d}".format(band, spec, id))
        # FIX: use the selected flatid; the original formatted the name with
        # the loop variable fid, which after the selection loop is always the
        # *last* flat of the night, making the selection dead code.
        flatname = graph_name(rawnight,
            "fiberflat-{}{}-{:08d}".format(band, spec, flatid))
        node = {}
        node['type'] = 'sky'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname]
        node['out'] = []
        grph[skyname] = node
        nd['out'].append(skyname)
        grph[flatname]['out'].append(skyname)

    # Construct the standard star files.  These are one per spectrograph,
    # and depend on the frames and the corresponding flats and sky files.
    stdgrph = {}
    for name, nd in grph.items():
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        starname = graph_name(rawnight,
            "stdstars-{}-{:08d}".format(spec, id))
        # does this spectrograph exist yet in the graph?
        if starname not in stdgrph:
            fmname = graph_name(rawnight, "fibermap-{:08d}".format(id))
            grph[fmname]['out'].append(starname)
            node = {}
            node['type'] = 'stdstars'
            node['spec'] = spec
            node['id'] = id
            node['in'] = [fmname]
            node['out'] = []
            stdgrph[starname] = node
        cam = "{}{}".format(band, spec)
        flatid = _most_recent_flatid(cam, id)
        # FIX: flatid, not the loop variable (see sky section above).
        flatname = graph_name(rawnight,
            "fiberflat-{}{}-{:08d}".format(band, spec, flatid))
        skyname = graph_name(rawnight,
            "sky-{}{}-{:08d}".format(band, spec, id))
        stdgrph[starname]['in'].extend([skyname, name, flatname])
        nd['out'].append(starname)
        grph[flatname]['out'].append(starname)
        grph[skyname]['out'].append(starname)

    grph.update(stdgrph)

    # Construct calibration files.
    # (snapshot: calib nodes are inserted during iteration)
    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _most_recent_flatid(cam, id)
        skyname = graph_name(rawnight,
            "sky-{}{}-{:08d}".format(band, spec, id))
        starname = graph_name(rawnight,
            "stdstars-{}-{:08d}".format(spec, id))
        # FIX: flatid, not the loop variable (see sky section above).
        flatname = graph_name(rawnight,
            "fiberflat-{}{}-{:08d}".format(band, spec, flatid))
        calname = graph_name(rawnight,
            "calib-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'calib'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname, skyname, starname]
        node['out'] = []
        grph[calname] = node
        grph[flatname]['out'].append(calname)
        grph[skyname]['out'].append(calname)
        grph[starname]['out'].append(calname)
        nd['out'].append(calname)

    # Build cframe files.
    # (snapshot: cframe nodes are inserted during iteration)
    for name, nd in list(grph.items()):
        if nd['type'] != 'frame':
            continue
        if nd['flavor'] == 'flat':
            continue
        band = nd['band']
        spec = nd['spec']
        id = nd['id']
        cam = "{}{}".format(band, spec)
        flatid = _most_recent_flatid(cam, id)
        skyname = graph_name(rawnight,
            "sky-{}{}-{:08d}".format(band, spec, id))
        # FIX: flatid, not the loop variable (see sky section above).
        flatname = graph_name(rawnight,
            "fiberflat-{}{}-{:08d}".format(band, spec, flatid))
        calname = graph_name(rawnight,
            "calib-{}{}-{:08d}".format(band, spec, id))
        cfname = graph_name(rawnight,
            "cframe-{}{}-{:08d}".format(band, spec, id))
        node = {}
        node['type'] = 'cframe'
        node['band'] = band
        node['spec'] = spec
        node['id'] = id
        node['in'] = [name, flatname, skyname, calname]
        node['out'] = []
        grph[cfname] = node
        grph[flatname]['out'].append(cfname)
        grph[skyname]['out'].append(cfname)
        grph[calname]['out'].append(cfname)
        nd['out'].append(cfname)

    # Brick / Zbest dependencies.  Brick and zbest names are global (not
    # prefixed with the night).
    for b in allbricks.keys():
        zbname = "zbest-{}".format(b)
        inb = []
        for band in ['b', 'r', 'z']:
            node = {}
            node['type'] = 'brick'
            node['brick'] = b
            node['band'] = band
            node['in'] = []
            node['out'] = [zbname]
            bname = "brick-{}-{}".format(band, b)
            inb.append(bname)
            grph[bname] = node
        node = {}
        node['type'] = 'zbest'
        node['brick'] = b
        node['ntarget'] = allbricks[b]
        node['in'] = inb
        node['out'] = []
        grph[zbname] = node

    # Every science cframe contributes to the bricks its fibermap touches.
    for name, nd in grph.items():
        if nd['type'] != 'fibermap':
            continue
        if nd['flavor'] == 'arc':
            continue
        if nd['flavor'] == 'flat':
            continue
        id = nd['id']
        bricks = nd['bricks']
        for band in ['b', 'r', 'z']:
            for spec in keep:
                cfname = graph_name(rawnight,
                    "cframe-{}{}-{:08d}".format(band, spec, id))
                for b in bricks:
                    bname = "brick-{}-{}".format(band, b)
                    grph[bname]['in'].append(cfname)
                    grph[cfname]['out'].append(bname)

    return (grph, expcount, allbricks)
def main(args):
    """Run the quickgen simulation and write DESI pipeline output files.

    Reads an optional fibermap and simspec input, simulates each spectrum
    through the SpecSim instrument model, then writes frame, cframe, sky and
    calib files (or fiberflat files for flat exposures) per camera and
    spectrograph.  Exits early (sys.exit) for flat flavor; returns early for
    arc flavor simspec inputs.
    """

    # Set up the logger
    if args.verbose:
        log = get_logger(DEBUG)
    else:
        log = get_logger()

    # Make sure all necessary environment variables are set
    DESI_SPECTRO_REDUX_DIR = "./quickGen"

    if 'DESI_SPECTRO_REDUX' not in os.environ:
        log.info('DESI_SPECTRO_REDUX environment is not set.')
    else:
        DESI_SPECTRO_REDUX_DIR = os.environ['DESI_SPECTRO_REDUX']

    # Create the redux directory if needed; re-raise any makedirs failure.
    if os.path.exists(DESI_SPECTRO_REDUX_DIR):
        if not os.path.isdir(DESI_SPECTRO_REDUX_DIR):
            raise RuntimeError("Path %s Not a directory" % DESI_SPECTRO_REDUX_DIR)
    else:
        try:
            os.makedirs(DESI_SPECTRO_REDUX_DIR)
        except:
            raise

    SPECPROD_DIR = 'specprod'
    if 'SPECPROD' not in os.environ:
        log.info('SPECPROD environment is not set.')
    else:
        SPECPROD_DIR = os.environ['SPECPROD']

    prod_Dir = specprod_root()

    if os.path.exists(prod_Dir):
        if not os.path.isdir(prod_Dir):
            raise RuntimeError("Path %s Not a directory" % prod_Dir)
    else:
        try:
            os.makedirs(prod_Dir)
        except:
            raise

    # Initialize random number generator to use.
    np.random.seed(args.seed)
    random_state = np.random.RandomState(args.seed)

    # Derive spectrograph number from nstart if needed
    # NOTE(review): on Python 3 this is float division; spectrograph is later
    # compared with "<=" against integer loop indices, so it still works, but
    # "// 500" looks intended — confirm.
    if args.spectrograph is None:
        args.spectrograph = args.nstart / 500

    # Read fibermapfile to get object type, night and expid
    if args.fibermap:
        log.info("Reading fibermap file {}".format(args.fibermap))
        fibermap = read_fibermap(args.fibermap)
        objtype = get_source_types(fibermap)
        stdindx = np.where(objtype == 'STD')  # match STD with STAR
        mwsindx = np.where(objtype == 'MWS_STAR')  # match MWS_STAR with STAR
        bgsindx = np.where(objtype == 'BGS')  # match BGS with LRG
        objtype[stdindx] = 'STAR'
        objtype[mwsindx] = 'STAR'
        objtype[bgsindx] = 'LRG'
        NIGHT = fibermap.meta['NIGHT']
        EXPID = fibermap.meta['EXPID']
    else:
        # Create a blank fake fibermap
        # NOTE(review): this branch binds lowercase night/expid, but the
        # file-writing code below uses NIGHT/EXPID, which are only defined in
        # the fibermap branch — confirm this path is exercised.
        fibermap = empty_fibermap(args.nspec)
        targetids = random_state.randint(2**62, size=args.nspec)
        fibermap['TARGETID'] = targetids
        night = get_night()
        expid = 0

    log.info("Initializing SpecSim with config {}".format(args.config))
    desiparams = load_desiparams()
    qsim = get_simulator(args.config, num_fibers=1)

    if args.simspec:
        # Read the input file
        log.info('Reading input file {}'.format(args.simspec))
        simspec = desisim.io.read_simspec(args.simspec)
        nspec = simspec.nspec
        if simspec.flavor == 'arc':
            # quickgen does not simulate arcs; nothing to write.
            log.warning("quickgen doesn't generate flavor=arc outputs")
            return
        else:
            wavelengths = simspec.wave
            spectra = simspec.flux
        if nspec < args.nspec:
            log.info("Only {} spectra in input file".format(nspec))
            args.nspec = nspec

    else:
        # Initialize the output truth table.
        spectra = []
        wavelengths = qsim.source.wavelength_out.to(u.Angstrom).value
        npix = len(wavelengths)
        truth = dict()
        meta = Table()
        truth['OBJTYPE'] = np.zeros(args.nspec, dtype=(str, 10))
        truth['FLUX'] = np.zeros((args.nspec, npix))
        truth['WAVE'] = wavelengths
        jj = list()

        # NOTE(review): true_objtype is not assigned anywhere in this
        # function — presumably it should come from get_source_types /
        # sample_objtype before this loop; verify against the original
        # quickgen script.
        for thisobj in set(true_objtype):
            ii = np.where(true_objtype == thisobj)[0]
            nobj = len(ii)
            truth['OBJTYPE'][ii] = thisobj
            log.info('Generating {} template'.format(thisobj))

            # Generate the templates
            if thisobj == 'ELG':
                elg = desisim.templates.ELG(wave=wavelengths,
                    add_SNeIa=args.add_SNeIa)
                flux, tmpwave, meta1 = elg.make_templates(
                    nmodel=nobj, seed=args.seed, zrange=args.zrange_elg,
                    sne_rfluxratiorange=args.sne_rfluxratiorange)
            elif thisobj == 'LRG':
                lrg = desisim.templates.LRG(wave=wavelengths,
                    add_SNeIa=args.add_SNeIa)
                flux, tmpwave, meta1 = lrg.make_templates(
                    nmodel=nobj, seed=args.seed, zrange=args.zrange_lrg,
                    sne_rfluxratiorange=args.sne_rfluxratiorange)
            elif thisobj == 'QSO':
                qso = desisim.templates.QSO(wave=wavelengths)
                flux, tmpwave, meta1 = qso.make_templates(
                    nmodel=nobj, seed=args.seed, zrange=args.zrange_qso)
            elif thisobj == 'BGS':
                bgs = desisim.templates.BGS(wave=wavelengths,
                    add_SNeIa=args.add_SNeIa)
                flux, tmpwave, meta1 = bgs.make_templates(
                    nmodel=nobj, seed=args.seed, zrange=args.zrange_bgs,
                    rmagrange=args.rmagrange_bgs,
                    sne_rfluxratiorange=args.sne_rfluxratiorange)
            elif thisobj == 'STD':
                std = desisim.templates.STD(wave=wavelengths)
                flux, tmpwave, meta1 = std.make_templates(nmodel=nobj,
                    seed=args.seed)
            elif thisobj == 'QSO_BAD':  # use STAR template no color cuts
                star = desisim.templates.STAR(wave=wavelengths)
                flux, tmpwave, meta1 = star.make_templates(nmodel=nobj,
                    seed=args.seed)
            elif thisobj == 'MWS_STAR' or thisobj == 'MWS':
                mwsstar = desisim.templates.MWS_STAR(wave=wavelengths)
                flux, tmpwave, meta1 = mwsstar.make_templates(nmodel=nobj,
                    seed=args.seed)
            elif thisobj == 'WD':
                wd = desisim.templates.WD(wave=wavelengths)
                flux, tmpwave, meta1 = wd.make_templates(nmodel=nobj,
                    seed=args.seed)
            elif thisobj == 'SKY':
                # Sky targets carry zero flux and redshift zero.
                flux = np.zeros((nobj, npix))
                meta1 = Table(dict(REDSHIFT=np.zeros(nobj, dtype=np.float32)))
            elif thisobj == 'TEST':
                # Synthetic test spectra: alternate a single emission line
                # and a flat continuum between even/odd fibers.
                flux = np.zeros((args.nspec, npix))
                # NOTE(review): `wave` is undefined here — the surrounding
                # code uses `wavelengths`; looks like a latent NameError in
                # this branch — confirm against the original script.
                indx = np.where(wave > 5800.0 - 1E-6)[0][0]
                ref_integrated_flux = 1E-10
                ref_cst_flux_density = 1E-17

                single_line = (np.arange(args.nspec) % 2 == 0).astype(
                    np.float32)
                continuum = (np.arange(args.nspec) % 2 == 1).astype(np.float32)

                for spec in range(args.nspec):
                    flux[spec, indx] = single_line[
                        spec] * ref_integrated_flux / np.gradient(wavelengths)[
                            indx]  # single line
                    flux[spec] += continuum[
                        spec] * ref_cst_flux_density  # flat continuum

                meta1 = Table(
                    dict(REDSHIFT=np.zeros(args.nspec, dtype=np.float32),
                         LINE=wave[indx] *
                         np.ones(args.nspec, dtype=np.float32),
                         LINEFLUX=single_line * ref_integrated_flux,
                         CONSTFLUXDENSITY=continuum * ref_cst_flux_density))
            else:
                log.fatal('Unknown object type {}'.format(thisobj))
                sys.exit(1)

            # Pack it in.
            truth['FLUX'][ii] = flux
            meta = vstack([meta, meta1])
            jj.append(ii.tolist())

            # Sanity check on units; templates currently return ergs, not 1e-17 ergs...
            # assert (thisobj == 'SKY') or (np.max(truth['FLUX']) < 1e-6)

        # Sort the metadata table back into fiber order (jj holds the
        # per-objtype index lists in generation order).
        jj = sum(jj, [])
        meta_new = Table()
        for k in range(args.nspec):
            index = int(np.where(np.array(jj) == k)[0])
            meta_new = vstack([meta_new, meta[index]])
        meta = meta_new

        # Add TARGETID and the true OBJTYPE to the metadata table.
        meta.add_column(
            Column(true_objtype, dtype=(str, 10), name='TRUE_OBJTYPE'))
        meta.add_column(Column(targetids, name='TARGETID'))

        # Rename REDSHIFT -> TRUEZ anticipating later table joins with zbest.Z
        meta.rename_column('REDSHIFT', 'TRUEZ')

    # explicitly set location on focal plane if needed to support airmass
    # variations when using specsim v0.5
    if qsim.source.focal_xy is None:
        qsim.source.focal_xy = (u.Quantity(0, 'mm'), u.Quantity(100, 'mm'))

    # Set simulation parameters from the simspec header or desiparams
    bright_objects = ['bgs', 'mws', 'bright', 'BGS', 'MWS', 'BRIGHT_MIX']
    gray_objects = ['gray', 'grey']
    if args.simspec is None:
        object_type = objtype
        flavor = None
    elif simspec.flavor == 'science':
        object_type = None
        flavor = simspec.header['PROGRAM']
    else:
        object_type = None
        flavor = simspec.flavor
        log.warning(
            'Maybe using an outdated simspec file with flavor={}'.format(
                flavor))

    # Set airmass
    if args.airmass is not None:
        qsim.atmosphere.airmass = args.airmass
    elif args.simspec and 'AIRMASS' in simspec.header:
        qsim.atmosphere.airmass = simspec.header['AIRMASS']
    else:
        qsim.atmosphere.airmass = 1.25  # Science Req. Doc L3.3.2

    # Set exptime
    if args.exptime is not None:
        qsim.observation.exposure_time = args.exptime * u.s
    elif args.simspec and 'EXPTIME' in simspec.header:
        qsim.observation.exposure_time = simspec.header['EXPTIME'] * u.s
    elif objtype in bright_objects:
        qsim.observation.exposure_time = desiparams['exptime_bright'] * u.s
    else:
        qsim.observation.exposure_time = desiparams['exptime_dark'] * u.s

    # Set Moon Phase
    if args.moon_phase is not None:
        qsim.atmosphere.moon.moon_phase = args.moon_phase
    elif args.simspec and 'MOONFRAC' in simspec.header:
        qsim.atmosphere.moon.moon_phase = simspec.header['MOONFRAC']
    elif flavor in bright_objects or object_type in bright_objects:
        qsim.atmosphere.moon.moon_phase = 0.7
    elif flavor in gray_objects:
        qsim.atmosphere.moon.moon_phase = 0.1
    else:
        qsim.atmosphere.moon.moon_phase = 0.5

    # Set Moon Zenith
    if args.moon_zenith is not None:
        qsim.atmosphere.moon.moon_zenith = args.moon_zenith * u.deg
    elif args.simspec and 'MOONALT' in simspec.header:
        qsim.atmosphere.moon.moon_zenith = simspec.header['MOONALT'] * u.deg
    elif flavor in bright_objects or object_type in bright_objects:
        qsim.atmosphere.moon.moon_zenith = 30 * u.deg
    elif flavor in gray_objects:
        qsim.atmosphere.moon.moon_zenith = 80 * u.deg
    else:
        qsim.atmosphere.moon.moon_zenith = 100 * u.deg

    # Set Moon - Object Angle
    if args.moon_angle is not None:
        qsim.atmosphere.moon.separation_angle = args.moon_angle * u.deg
    elif args.simspec and 'MOONSEP' in simspec.header:
        qsim.atmosphere.moon.separation_angle = simspec.header[
            'MOONSEP'] * u.deg
    elif flavor in bright_objects or object_type in bright_objects:
        qsim.atmosphere.moon.separation_angle = 50 * u.deg
    elif flavor in gray_objects:
        qsim.atmosphere.moon.separation_angle = 60 * u.deg
    else:
        qsim.atmosphere.moon.separation_angle = 60 * u.deg

    # Initialize per-camera output arrays that will be saved
    waves, trueflux, noisyflux, obsivar, resolution, sflux = {}, {}, {}, {}, {}, {}

    maxbin = 0
    nmax = args.nspec
    for camera in qsim.instrument.cameras:
        # Lookup this camera's resolution matrix and convert to the sparse
        # format used in desispec.
        R = Resolution(camera.get_output_resolution_matrix())
        resolution[camera.name] = np.tile(R.to_fits_array(),
                                          [args.nspec, 1, 1])
        waves[camera.name] = (camera.output_wavelength.to(
            u.Angstrom).value.astype(np.float32))
        nwave = len(waves[camera.name])
        maxbin = max(maxbin, len(waves[camera.name]))
        # Per-exposure accumulation buffers, indexed (fiber, arm, wavebin);
        # rebound on every loop pass, so the last (largest maxbin) wins.
        nobj = np.zeros((nmax, 3, maxbin))  # object photons
        nsky = np.zeros((nmax, 3, maxbin))  # sky photons
        nivar = np.zeros((nmax, 3, maxbin))  # inverse variance (object+sky)
        cframe_observedflux = np.zeros(
            (nmax, 3, maxbin))  # calibrated object flux
        cframe_ivar = np.zeros(
            (nmax, 3, maxbin))  # inverse variance of calibrated object flux
        cframe_rand_noise = np.zeros(
            (nmax, 3, maxbin))  # random Gaussian noise to calibrated flux
        sky_ivar = np.zeros((nmax, 3, maxbin))  # inverse variance of sky
        sky_rand_noise = np.zeros(
            (nmax, 3, maxbin))  # random Gaussian noise to sky only
        frame_rand_noise = np.zeros(
            (nmax, 3, maxbin))  # random Gaussian noise to nobj+nsky
        trueflux[camera.name] = np.empty(
            (args.nspec, nwave))  # calibrated flux
        noisyflux[camera.name] = np.empty(
            (args.nspec, nwave))  # observed flux with noise
        obsivar[camera.name] = np.empty(
            (args.nspec, nwave))  # inverse variance of flux
        if args.simspec:
            # Any spectrograph (0-9) of this channel gives the wavelength
            # grid spacing; for/else raises if none is present.
            for i in range(10):
                cn = camera.name + str(i)
                if cn in simspec.cameras:
                    dw = np.gradient(simspec.cameras[cn].wave)
                    break
            else:
                raise RuntimeError(
                    'Unable to find a {} camera in input simspec'.format(
                        camera))
        else:
            sflux = np.empty((args.nspec, npix))

    #- Check if input simspec is for a continuum flat lamp instead of science
    #- This does not convolve to per-fiber resolution
    if args.simspec:
        if simspec.flavor == 'flat':
            log.info("Simulating flat lamp exposure")
            for i, camera in enumerate(qsim.instrument.cameras):
                channel = camera.name  #- from simspec, b/r/z not b0/r1/z9
                assert camera.output_wavelength.unit == u.Angstrom
                num_pixels = len(waves[channel])

                # Stack the photons of every spectrograph of this channel.
                phot = list()
                for j in range(10):
                    cn = camera.name + str(j)
                    if cn in simspec.cameras:
                        camwave = simspec.cameras[cn].wave
                        dw = np.gradient(camwave)
                        phot.append(simspec.cameras[cn].phot)

                if len(phot) == 0:
                    raise RuntimeError(
                        'Unable to find a {} camera in input simspec'.format(
                            camera))
                else:
                    phot = np.vstack(phot)

                meanspec = resample_flux(waves[channel], camwave,
                                         np.average(phot / dw, axis=0))
                # Fiberflat realized as 1 + Poisson-like scatter about the
                # mean spectrum.
                fiberflat = random_state.normal(loc=1.0,
                                                scale=1.0 / np.sqrt(meanspec),
                                                size=(nspec, num_pixels))
                ivar = np.tile(meanspec, [nspec, 1])
                mask = np.zeros((simspec.nspec, num_pixels), dtype=np.uint32)

                # One output file per 500-fiber spectrograph.
                for kk in range((args.nspec + args.nstart - 1) // 500 + 1):
                    camera = channel + str(kk)
                    outfile = desispec.io.findfile('fiberflat', NIGHT, EXPID,
                                                   camera)
                    start = max(500 * kk, args.nstart)
                    end = min(500 * (kk + 1), nmax)

                    if (args.spectrograph <= kk):
                        log.info(
                            "Writing files for channel:{}, spectrograph:{}, spectra:{} to {}"
                            .format(channel, kk, start, end))

                        ff = FiberFlat(waves[channel], fiberflat[start:end, :],
                                       ivar[start:end, :], mask[start:end, :],
                                       meanspec,
                                       header=dict(CAMERA=camera))
                        write_fiberflat(outfile, ff)
                        filePath = desispec.io.findfile("fiberflat", NIGHT,
                                                        EXPID, camera)
                        log.info("Wrote file {}".format(filePath))

            sys.exit(0)

    # Repeat the simulation for all spectra
    fluxunits = 1e-17 * u.erg / (u.s * u.cm**2 * u.Angstrom)
    for j in range(args.nspec):

        thisobjtype = objtype[j]
        sys.stdout.flush()
        if flavor == 'arc':
            # NOTE(review): '...{0}'.format is passed *uncalled* (a bound
            # method, not a string) — looks like a missing (j); confirm.
            qsim.source.update_in('Quickgen source {0}'.format, 'perfect',
                                  wavelengths * u.Angstrom,
                                  spectra * fluxunits)
        else:
            qsim.source.update_in('Quickgen source {0}'.format(j),
                                  thisobjtype.lower(),
                                  wavelengths * u.Angstrom,
                                  spectra[j, :] * fluxunits)
        qsim.source.update_out()

        qsim.simulate()
        qsim.generate_random_noise(random_state)

        for i, output in enumerate(qsim.camera_output):
            assert output['observed_flux'].unit == 1e17 * fluxunits
            # Extract the simulation results needed to create our uncalibrated
            # frame output file.
            num_pixels = len(output)
            nobj[j, i, :num_pixels] = output['num_source_electrons'][:, 0]
            nsky[j, i, :num_pixels] = output['num_sky_electrons'][:, 0]
            nivar[j, i, :num_pixels] = 1.0 / output['variance_electrons'][:, 0]

            # Get results for our flux-calibrated output file.
            cframe_observedflux[
                j, i, :num_pixels] = 1e17 * output['observed_flux'][:, 0]
            cframe_ivar[
                j, i, :num_pixels] = 1e-34 * output['flux_inverse_variance'][:,
                                                                             0]

            # Fill brick arrays from the results.
            camera = output.meta['name']
            trueflux[camera][j][:] = 1e17 * output['observed_flux'][:, 0]
            noisyflux[camera][j][:] = 1e17 * (
                output['observed_flux'][:, 0] +
                output['flux_calibration'][:, 0] *
                output['random_noise_electrons'][:, 0])
            obsivar[camera][j][:] = 1e-34 * output['flux_inverse_variance'][:,
                                                                            0]

            # Use the same noise realization in the cframe and frame, without any
            # additional noise from sky subtraction for now.
            frame_rand_noise[
                j, i, :num_pixels] = output['random_noise_electrons'][:, 0]
            cframe_rand_noise[j, i, :num_pixels] = 1e17 * (
                output['flux_calibration'][:, 0] *
                output['random_noise_electrons'][:, 0])

            # The sky output file represents a model fit to ~40 sky fibers.
            # We reduce the variance by a factor of 25 to account for this and
            # give the sky an independent (Gaussian) noise realization.
            sky_ivar[
                j,
                i, :num_pixels] = 25.0 / (output['variance_electrons'][:, 0] -
                                          output['num_source_electrons'][:, 0])
            sky_rand_noise[j, i, :num_pixels] = random_state.normal(
                scale=1.0 / np.sqrt(sky_ivar[j, i, :num_pixels]),
                size=num_pixels)

    armName = {"b": 0, "r": 1, "z": 2}
    for channel in 'brz':

        #Before writing, convert from counts/bin to counts/A (as in Pixsim output)
        #Quicksim Default:
        #FLUX - input spectrum resampled to this binning; no noise added [1e-17 erg/s/cm2/s/Ang]
        #COUNTS_OBJ - object counts in 0.5 Ang bin
        #COUNTS_SKY - sky counts in 0.5 Ang bin

        num_pixels = len(waves[channel])
        dwave = np.gradient(waves[channel])
        nobj[:, armName[channel], :num_pixels] /= dwave
        frame_rand_noise[:, armName[channel], :num_pixels] /= dwave
        nivar[:, armName[channel], :num_pixels] *= dwave**2
        nsky[:, armName[channel], :num_pixels] /= dwave
        sky_rand_noise[:, armName[channel], :num_pixels] /= dwave
        # NOTE(review): nivar is scaled with *= dwave**2 above but sky_ivar
        # with /= dwave**2 here — the asymmetry looks suspicious; confirm
        # which is intended before relying on sky ivar units.
        sky_ivar[:, armName[channel], :num_pixels] /= dwave**2

        # Now write the outputs in DESI standard file system. None of the output file can have more than 500 spectra

        # Looping over spectrograph
        for ii in range((args.nspec + args.nstart - 1) // 500 + 1):

            start = max(500 * ii,
                        args.nstart)  # first spectrum for a given spectrograph
            end = min(500 * (ii + 1),
                      nmax)  # last spectrum for the spectrograph

            if (args.spectrograph <= ii):
                camera = "{}{}".format(channel, ii)
                log.info(
                    "Writing files for channel:{}, spectrograph:{}, spectra:{} to {}"
                    .format(channel, ii, start, end))
                num_pixels = len(waves[channel])

                # Write frame file
                framefileName = desispec.io.findfile("frame", NIGHT, EXPID,
                                                     camera)

                frame_flux=nobj[start:end,armName[channel],:num_pixels]+ \
                nsky[start:end,armName[channel],:num_pixels] + \
                frame_rand_noise[start:end,armName[channel],:num_pixels]
                frame_ivar = nivar[start:end, armName[channel], :num_pixels]

                # required for slicing the resolution metric, resolution
                # matrix has (nspec, ndiag, wave)
                sh1 = frame_flux.shape[0]

                # for example if nstart =400, nspec=150: two spectrographs:
                # 400-499=> 0 spectrograph, 500-549 => 1
                if (args.nstart == start):
                    resol = resolution[channel][:sh1, :, :]
                else:
                    resol = resolution[channel][-sh1:, :, :]

                # must create desispec.Frame object
                frame=Frame(waves[channel], frame_flux, frame_ivar,\
                    resolution_data=resol, spectrograph=ii, \
                    fibermap=fibermap[start:end], \
                    meta=dict(CAMERA=camera, FLAVOR=simspec.flavor) )
                desispec.io.write_frame(framefileName, frame)

                framefilePath = desispec.io.findfile("frame", NIGHT, EXPID,
                                                     camera)
                log.info("Wrote file {}".format(framefilePath))

                if args.frameonly or simspec.flavor == 'arc':
                    continue

                # Write cframe file
                cframeFileName = desispec.io.findfile("cframe", NIGHT, EXPID,
                                                      camera)
                cframeFlux = cframe_observedflux[
                    start:end, armName[channel], :num_pixels] + cframe_rand_noise[
                        start:end, armName[channel], :num_pixels]
                cframeIvar = cframe_ivar[start:end,
                                         armName[channel], :num_pixels]

                # must create desispec.Frame object
                cframe = Frame(waves[channel], cframeFlux, cframeIvar, \
                    resolution_data=resol, spectrograph=ii,
                    fibermap=fibermap[start:end],
                    meta=dict(CAMERA=camera, FLAVOR=simspec.flavor) )
                desispec.io.frame.write_frame(cframeFileName, cframe)
                cframefilePath = desispec.io.findfile("cframe", NIGHT, EXPID,
                                                      camera)
                log.info("Wrote file {}".format(cframefilePath))

                # Write sky file
                skyfileName = desispec.io.findfile("sky", NIGHT, EXPID, camera)
                skyflux=nsky[start:end,armName[channel],:num_pixels] + \
                sky_rand_noise[start:end,armName[channel],:num_pixels]
                skyivar = sky_ivar[start:end, armName[channel], :num_pixels]
                skymask = np.zeros(skyflux.shape, dtype=np.uint32)

                # must create desispec.Sky object
                skymodel = SkyModel(waves[channel],
                                    skyflux,
                                    skyivar,
                                    skymask,
                                    header=dict(CAMERA=camera))
                desispec.io.sky.write_sky(skyfileName, skymodel)
                skyfilePath = desispec.io.findfile("sky", NIGHT, EXPID, camera)
                log.info("Wrote file {}".format(skyfilePath))

                # Write calib file
                calibVectorFile = desispec.io.findfile("calib", NIGHT, EXPID,
                                                       camera)
                flux = cframe_observedflux[start:end,
                                           armName[channel], :num_pixels]
                phot = nobj[start:end, armName[channel], :num_pixels]
                calibration = np.zeros_like(phot)
                # Only divide where the calibrated flux is positive.
                jj = (flux > 0)
                calibration[jj] = phot[jj] / flux[jj]

                #- TODO: what should calibivar be?
                #- For now, model it as the noise of combining ~10 spectra
                calibivar = 10 / cframe_ivar[start:end,
                                             armName[channel], :num_pixels]
                #mask=(1/calibivar>0).astype(int)??
                mask = np.zeros(calibration.shape, dtype=np.uint32)

                # write flux calibration
                fluxcalib = FluxCalib(waves[channel], calibration, calibivar,
                                      mask)
                write_flux_calibration(calibVectorFile, fluxcalib)
                calibfilePath = desispec.io.findfile("calib", NIGHT, EXPID,
                                                     camera)
                log.info("Wrote file {}".format(calibfilePath))
def _run_pipeline_script(proddir, scriptname, description):
    """Run one generated pipeline shell script under bash.

    Args:
        proddir (str): production directory containing run/scripts/
        scriptname (str): script filename, e.g. "extract_all.sh"
        description (str): short human-readable step name for the log line

    Raises:
        subprocess.CalledProcessError if the script exits non-zero
    """
    com = os.path.join(proddir, "run", "scripts", scriptname)
    print("Running {} script {}".format(description, com))
    sp.check_call(["bash", com])


def _objtype_from_spectype(spectype):
    """Map a zbest spectype string to the simplified object type for truth comparison."""
    if spectype == 'ssp_em_galaxy':
        return 'GAL'
    elif spectype == 'spEigenStar':
        return 'STAR'
    else:
        return spectype


def _redshift_status(truetype, objtype, dv, zwarn):
    """Return 'ok' if the fitted type/redshift is consistent with the truth, else 'OOPS'.

    Args:
        truetype (str): true object type from the simulation metadata
        objtype (str): fitted object type ('GAL', 'STAR', 'QSO', ...)
        dv (float): velocity offset in km/s between fitted and true redshift
        zwarn (int): redshift warning bitmask from the fitter

    Notes:
        A SKY fiber is *expected* to yield a warning, so zwarn > 0 counts as ok.
        Velocity tolerances: 150 km/s for galaxies (LRG/ELG), 750 km/s for QSOs.
    """
    if truetype == 'SKY' and zwarn > 0:
        return 'ok'
    if zwarn == 0:
        if truetype == 'LRG' and objtype == 'GAL' and abs(dv) < 150:
            return 'ok'
        if truetype == 'ELG' and objtype == 'GAL' and abs(dv) < 150:
            return 'ok'
        if truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
            return 'ok'
        if truetype == 'STD' and objtype == 'STAR':
            return 'ok'
    return 'OOPS'


def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts.

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails
    """
    log = logging.get_logger()
    log.setLevel(logging.DEBUG)

    # YEARMMDD string, rolls over at noon not midnight
    # TODO: fix usage of night to be something other than today
    if night is None:
        #night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600))
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # create production
    # FIXME: someday run PSF estimation too...
    ### com = "desi_pipe --env env.txt --spectrographs 0 --fakeboot --fakepsf"
    rawdir = os.path.join(os.getenv('DESI_SPECTRO_SIM'), os.getenv('PIXPROD'))
    com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf --raw {}".format(rawdir)
    sp.check_call(com, shell=True)

    # raw and production locations (rawdir re-derived from the canonical helper)
    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # Modify options file to restrict the spectral range
    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.read_options(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    pipe.write_options(optpath, opts)

    # run the generated shell scripts
    # FIXME: someday run PSF estimation too...
    # (bootcalib / specex / psfcombine steps intentionally disabled:)
    # print("Running bootcalib script...")
    # com = os.path.join(proddir, "run", "scripts", "bootcalib_all.sh")
    # sp.check_call(["bash", com])
    # print("Running specex script...")
    # com = os.path.join(proddir, "run", "scripts", "specex_all.sh")
    # sp.check_call(["bash", com])
    # print("Running psfcombine script...")
    # com = os.path.join(proddir, "run", "scripts", "psfcombine_all.sh")
    # sp.check_call(["bash", com])

    _run_pipeline_script(proddir, "extract_all.sh", "extraction")
    _run_pipeline_script(proddir, "fiberflat-procexp_all.sh", "calibration")
    _run_pipeline_script(proddir, "bricks.sh", "makebricks")
    _run_pipeline_script(proddir, "zfind_all.sh", "zfind")

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)

    # simspec truth table lives next to the fibermap with a parallel naming scheme
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'METADATA')

    brickdirs = glob.glob(os.path.join(proddir, "bricks", "*"))
    bricks = [os.path.basename(x) for x in brickdirs]

    print()
    print("--------------------------------------------------")
    print("Brick          True  z        ->  Class  z        zwarn")
    # print("3338p190 SKY 0.00000 -> QSO 1.60853   12   - ok")
    for b in bricks:
        zbest = io.read_zbest(io.findfile('zbest', brickname=b))
        for i in range(len(zbest.z)):
            objtype = _objtype_from_spectype(zbest.spectype[i])
            z, zwarn = zbest.z[i], zbest.zwarn[i]

            # match fitted target back to its truth row via TARGETID
            j = np.where(fibermap['TARGETID'] == zbest.targetid[i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            truez = siminfo['REDSHIFT'][j]
            dv = 3e5 * (z - truez) / (1 + truez)  # velocity offset in km/s

            status = _redshift_status(truetype, objtype, dv, zwarn)
            print('{0}  {1:4s} {2:8.5f}  -> {3:5s} {4:8.5f} {5:4d}  -  {6}'.format(
                b, truetype, truez, objtype, z, zwarn, status))
    print("--------------------------------------------------")