class PypeIt(object): """ This class runs the primary calibration and extraction in PypeIt Args: pypeit_file (:obj:`str`): PypeIt filename verbosity (:obj:`int`, optional): Verbosity level of system output. Can be:: - 0: No output - 1: Minimal output (default) - 2: All output overwrite (:obj:`bool`, optional): Flag to overwrite any existing files/directories. reuse_masters (bool, optional): Reuse any pre-existing calibration files logname (:obj:`str`, optional): The name of an ascii log file with the details of the reduction. redux_path (:obj:`str`, optional): Over-ride reduction path in PypeIt file (e.g. Notebook usage) show: (:obj:`bool`, optional): Show reduction steps via plots (which will block further execution until clicked on) and outputs to ginga. Requires remote control ginga session via "ginga --modules=RC &" Attributes: pypeit_file (:obj:`str`): Name of the pypeit file to read. PypeIt files have a specific set of valid formats. A description can be found `here`_ (include doc link). fitstbl (:obj:`pypit.metadata.PypeItMetaData`): holds the meta info """ # __metaclass__ = ABCMeta def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None, show=False, redux_path=None): # Load cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(pypeit_file, runtime=True) self.pypeit_file = pypeit_file # Spectrograph cfg = ConfigObj(cfg_lines) spectrograph_name = cfg['rdx']['spectrograph'] self.spectrograph = load_spectrograph(spectrograph_name) # Par # Defaults spectrograph_def_par = self.spectrograph.default_pypeit_par() # Grab a science file for configuration specific parameters sci_file = None for idx, row in enumerate(usrdata): if 'science' in row['frametype']: sci_file = data_files[idx] break # Set spectrograph_cfg_lines = self.spectrograph.config_specific_par(spectrograph_def_par, sci_file).to_config() self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines) # Fitstbl self.fitstbl = PypeItMetaData(self.spectrograph, self.par, file_list=data_files, usrdata=usrdata, strict=True) # The following could be put in a prepare_to_run() method in PypeItMetaData if 'setup' not in self.fitstbl.keys(): self.fitstbl['setup'] = setups[0] self.fitstbl.get_frame_types(user=frametype) # This sets them using the user inputs self.fitstbl.set_defaults() # Only does something if values not set in PypeIt file self.fitstbl._set_calib_group_bits() self.fitstbl._check_calib_groups() # Write .calib file (For QA naming amongst other things) calib_file = pypeit_file.replace('.pypeit', '.calib') self.fitstbl.write_calib(calib_file) # Other Internals self.logname = logname self.overwrite = overwrite # Currently the runtime argument determines the behavior for reuse_masters. There is also a reuse_masters # parameter in the parset but it is currently ignored. 
self.reuse_masters=reuse_masters self.show = show # Make the output directories self.par['rdx']['redux_path'] = os.getcwd() if redux_path is None else redux_path msgs.info("Setting reduction path to {:s}".format(self.par['rdx']['redux_path'])) paths.make_dirs(self.spectrograph.spectrograph, self.par['calibrations']['caldir'], self.par['rdx']['scidir'], self.par['rdx']['qadir'], overwrite=self.overwrite, redux_path=self.par['rdx']['redux_path']) # Instantiate Calibrations class self.caliBrate \ = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], self.spectrograph, redux_path=self.par['rdx']['redux_path'], reuse_masters=self.reuse_masters, save_masters=True, write_qa=True, show=self.show) # Init self.verbosity = verbosity # TODO: I don't think this ever used self.frame = None self.det = None self.tstart = None self.basename = None self.sciI = None self.obstime = None def build_qa(self): """ Generate QA wrappers """ qa.gen_mf_html(self.pypeit_file) qa.gen_exp_html() def outfile_exists(self, frame): """ Check whether the 2D outfile of a given frame already exists Args: frame (int): Frame index from fitstbl Returns: bool: True if the 2d file exists False if it does not exist """ # Check if the 2d output file exists scidir = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir']) basename = self.fitstbl.construct_basename(frame) outfile = scidir + '/spec2d_{:s}.fits'.format(basename) return os.path.isfile(outfile) def get_std_outfile(self, standard_frames): """ Grab the output filename from an input list of standard_frame indices If more than one index is provided, the first is taken Args: standard_frames (list): List of indices corresponding to standard stars Returns: str: Full path to the standard spec1d output file """ # TODO: Need to decide how to associate standards with # science frames in the case where there is more than one # standard associated with a given science frame. Below, I # just use the first standard std_outfile = None std_frame = None if len(standard_frames) == 0 else standard_frames[0] # Prepare to load up standard? 
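        # Only an integer (table-row) index is mapped to a spec1d filename
        # below; any other value leaves std_outfile as None, and a constructed
        # filename that is missing on disk triggers msgs.error.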
if std_frame is not None: std_outfile = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'], 'spec1d_{:s}.fits'.format(self.fitstbl.construct_basename(std_frame))) \ if isinstance(std_frame, (int,np.integer)) else None if std_outfile is not None and not os.path.isfile(std_outfile): msgs.error('Could not find standard file: {0}'.format(std_outfile)) return std_outfile def reduce_all(self): """ Main driver of the entire reduction Calibration and extraction via a series of calls to reduce_exposure() """ # Validate the parameter set required = ['rdx', 'calibrations', 'scienceframe', 'scienceimage', 'flexure', 'fluxcalib'] can_be_None = ['flexure', 'fluxcalib'] self.par.validate_keys(required=required, can_be_None=can_be_None) self.tstart = time.time() # Find the standard frames is_standard = self.fitstbl.find_frames('standard') # Find the science frames is_science = self.fitstbl.find_frames('science') # Frame indices frame_indx = np.arange(len(self.fitstbl)) # Iterate over each calibration group and reduce the standards for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the standard frames in this calibration group: grp_standards = frame_indx[is_standard & in_grp] # Reduce all the standard frames, loop on unique comb_id u_combid_std= np.unique(self.fitstbl['comb_id'][grp_standards]) for j, comb_id in enumerate(u_combid_std): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: std_dict = self.reduce_exposure(frames, bg_frames=bg_frames) # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], std_dict, self.basename) else: msgs.info('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') # Iterate over each calibration group again and reduce the science frames for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the science frames in this calibration group: grp_science = frame_indx[is_science & in_grp] # Associate standards (previously reduced above) for this setup std_outfile = self.get_std_outfile(frame_indx[is_standard]) # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration science_basename = [None]*len(grp_science) # Loop on unique comb_id u_combid = np.unique(self.fitstbl['comb_id'][grp_science]) for j, comb_id in enumerate(u_combid): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: sci_dict = self.reduce_exposure(frames, bg_frames=bg_frames, std_outfile=std_outfile) science_basename[j] = self.basename # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], sci_dict, self.basename) else: msgs.warn('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') msgs.info('Finished calibration group {0}'.format(i)) # Finish self.print_end_time() def select_detectors(self): """ Return the 1-indexed list of detectors to reduce. 
        Returns:
            list: List of detectors to be reduced
        """
        if self.par['rdx']['detnum'] is None:
            return np.arange(self.spectrograph.ndet)+1
        return [self.par['rdx']['detnum']] if isinstance(self.par['rdx']['detnum'], int) \
                    else self.par['rdx']['detnum']

    def reduce_exposure(self, frames, bg_frames=[], std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the frames to reduce
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background
            std_outfile (:obj:`str`, optional):
                The name of a file with a previously PypeIt-reduced standard spectrum.

        Returns:
            dict: The dictionary containing the primary outputs of extraction
        """
        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            ginga.clear_all()

        # Save the frames
        self.frames = frames
        self.bg_frames = bg_frames
        # Is this an IR reduction?
        self.ir_redux = True if len(bg_frames) > 0 else False

        # JFH Why does this need to be ordered?
        sci_dict = OrderedDict()  # This needs to be ordered
        sci_dict['meta'] = {}
        sci_dict['meta']['vel_corr'] = 0.
        sci_dict['meta']['ir_redux'] = self.ir_redux

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(self.fitstbl['target'][self.frames[0]]) + msgs.newline()
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in self.frames:
            msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if len(bg_frames) > 0:
            bg_msgs_string = ''
            for iframe in self.bg_frames:
                bg_msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline() + 'Using background from frames:' + msgs.newline() + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        detectors = self.select_detectors()
        if len(detectors) != self.spectrograph.ndet:
            msgs.warn('Not reducing detectors: {0}'.format(' '.join([str(d) for d in
                      set(np.arange(self.spectrograph.ndet) + 1) - set(detectors)])))

        # Loop on Detectors
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            sci_dict[self.det] = {}

            # Calibrate
            # TODO: Is this the right behavior, to just use the first frame?
            self.caliBrate.set_config(self.frames[0], self.det, self.par['calibrations'])
            self.caliBrate.run_the_steps()

            # Extract
            # TODO: pass back the background frame, pass in background
            # files as an argument. extract_one takes a file list as an
            # argument and instantiates science within
            sci_dict[self.det]['sciimg'], sci_dict[self.det]['sciivar'], sci_dict[self.det]['skymodel'], \
                sci_dict[self.det]['objmodel'], sci_dict[self.det]['ivarmodel'], sci_dict[self.det]['outmask'], \
                sci_dict[self.det]['specobjs'], vel_corr \
                    = self.extract_one(self.frames, self.det, bg_frames=self.bg_frames,
                                       std_outfile=std_outfile)
            if vel_corr is not None:
                sci_dict['meta']['vel_corr'] = vel_corr

            # JFH TODO write out the background frame?
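            # At this point sci_dict[self.det] holds the per-detector products
            # ('sciimg', 'sciivar', 'skymodel', 'objmodel', 'ivarmodel',
            # 'outmask', 'specobjs') that save_exposure() later passes to the
            # writer, while sci_dict['meta'] carries vel_corr and the ir_redux flag.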
# Return return sci_dict def flexure_correct(self, sobjs, maskslits): """ Correct for flexure Spectra are modified in place (wavelengths are shifted) Args: sobjs (SpecObjs): maskslits (ndarray): Mask of SpecObjs """ if self.par['flexure']['method'] != 'skip': flex_list = wave.flexure_obj(sobjs, maskslits, self.par['flexure']['method'], self.par['flexure']['spectrum'], mxshft=self.par['flexure']['maxshift']) # QA wave.flexure_qa(sobjs, maskslits, self.basename, self.det, flex_list, out_dir=self.par['rdx']['redux_path']) else: msgs.info('Skipping flexure correction.') def helio_correct(self, sobjs, maskslits, frame, obstime): """ Perform a heliocentric correction on a set of spectra Args: sobjs (pypeit.specobjs.SpecObjs): Spectra maskslits (ndarray): Slits that are masked frame (int): Frame to use for meta info obstime (astropy.time.Time): Returns: astropy.units.Quantity: Velocity correction in km/s """ # Helio, correct Earth's motion if (self.caliBrate.par['wavelengths']['frame'] in ['heliocentric', 'barycentric']) \ and (self.caliBrate.par['wavelengths']['reference'] != 'pixel'): # TODO change this keyword to refframe instead of frame msgs.info("Performing a {0} correction".format(self.caliBrate.par['wavelengths']['frame'])) vel, vel_corr = wave.geomotion_correct(sobjs, maskslits, self.fitstbl, frame, obstime, self.spectrograph.telescope['longitude'], self.spectrograph.telescope['latitude'], self.spectrograph.telescope['elevation'], self.caliBrate.par['wavelengths']['frame']) else: msgs.info('A wavelength reference-frame correction will not be performed.') vel_corr = None return vel_corr def get_sci_metadata(self, frame, det): """ Grab the meta data for a given science frame and specific detector Args: frame (int): Frame index det (int): Detector index Returns: 5 objects are returned:: - str: Object type; science or standard - str: Setup string from master_key() - astropy.time.Time: Time of observation - str: Basename of the frame - str: Binning of the detector """ # Set binning, obstime, basename, and objtype binning = self.fitstbl['binning'][frame] obstime = self.fitstbl.construct_obstime(frame) basename = self.fitstbl.construct_basename(frame, obstime=obstime) objtype = self.fitstbl['frametype'][frame] if 'science' in objtype: objtype_out = 'science' elif 'standard' in objtype: objtype_out = 'standard' else: msgs.error('Unrecognized objtype') setup = self.fitstbl.master_key(frame, det=det) return objtype_out, setup, obstime, basename, binning def get_std_trace(self, std_redux, det, std_outfile): """ Returns the trace of the standard if it is applicable to the current reduction Args: std_redux (bool): If False, proceed det (int): Detector index std_outfile (str): Filename for the standard star spec1d file Returns: ndarray: Trace of the standard star on input detector """ if std_redux is False and std_outfile is not None: sobjs, hdr_std = load.load_specobjs(std_outfile) # Does the detector match? # TODO Instrument specific logic here could be implemented with the parset. 
For example LRIS-B or LRIS-R we # we would use the standard from another detector this_det = sobjs.det == det if np.any(this_det): sobjs_det = sobjs[this_det] sobjs_std = sobjs_det.get_std() std_trace = sobjs_std.trace_spat # flatten the array if this multislit if 'MultiSlit' in self.spectrograph.pypeline: std_trace = std_trace.flatten() elif 'Echelle' in self.spectrograph.pypeline: std_trace = std_trace.T else: msgs.error('Unrecognized pypeline') else: std_trace = None else: std_trace = None return std_trace def extract_one(self, frames, det, bg_frames=[], std_outfile=None): """ Extract a single exposure/detector pair sci_ID and det need to have been set internally prior to calling this method Args: frames (list): List of frames to extract; stacked if more than one is provided det (int): bg_frames (list, optional): List of frames to use as the background std_outfile (str, optional): Returns: eight objects are returned:: - ndarray: Science image - ndarray: Science inverse variance image - ndarray: Model of the sky - ndarray: Model of the object - ndarray: Model of inverse variance - ndarray: Mask - :obj:`pypeit.specobjs.SpecObjs`: spectra - astropy.units.Quantity: velocity correction """ # Grab some meta-data needed for the reduction from the fitstbl self.objtype, self.setup, self.obstime, self.basename, self.binning = self.get_sci_metadata(frames[0], det) # Is this a standard star? self.std_redux = 'standard' in self.objtype # Get the standard trace if need be std_trace = self.get_std_trace(self.std_redux, det, std_outfile) # Instantiate ScienceImage for the files we will reduce sci_files = self.fitstbl.frame_paths(frames) self.sciI = scienceimage.ScienceImage(self.spectrograph, sci_files, bg_file_list=self.fitstbl.frame_paths(bg_frames), ir_redux = self.ir_redux, par=self.par['scienceframe'], det=det, binning=self.binning) # For QA on crash. msgs.sciexp = self.sciI # Process images (includes inverse variance image, rn2 image, and CR mask) self.sciimg, self.sciivar, self.rn2img, self.mask, self.crmask = \ self.sciI.proc(self.caliBrate.msbias, self.caliBrate.mspixflatnrm.copy(), self.caliBrate.msbpm, illum_flat=self.caliBrate.msillumflat, show=self.show) # Object finding, first pass on frame without sky subtraction self.maskslits = self.caliBrate.maskslits.copy() self.redux = reduce.instantiate_me(self.spectrograph, self.caliBrate.tslits_dict, self.mask, self.par, ir_redux = self.ir_redux, objtype=self.objtype, setup=self.setup, det=det, binning=self.binning) # Prep for manual extraction (if requested) manual_extract_dict = self.fitstbl.get_manual_extract(frames, det) # Do one iteration of object finding, and sky subtract to get initial sky model self.sobjs_obj, self.nobj, skymask_init = \ self.redux.find_objects(self.sciimg, self.sciivar, std=self.std_redux, ir_redux=self.ir_redux, std_trace=std_trace,maskslits=self.maskslits, show=self.show & (not self.std_redux), manual_extract_dict=manual_extract_dict) # Global sky subtraction, first pass. Uses skymask from object finding step above self.initial_sky = \ self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], skymask=skymask_init, std=self.std_redux, maskslits=self.maskslits, show=self.show) if not self.std_redux: # Object finding, second pass on frame *with* sky subtraction. 
Show here if requested self.sobjs_obj, self.nobj, self.skymask = \ self.redux.find_objects(self.sciimg - self.initial_sky, self.sciivar, std=self.std_redux, ir_redux=self.ir_redux, std_trace=std_trace,maskslits=self.maskslits,show=self.show, manual_extract_dict=manual_extract_dict) # If there are objects, do 2nd round of global_skysub, local_skysub_extract, flexure, geo_motion if self.nobj > 0: # Global sky subtraction second pass. Uses skymask from object finding self.global_sky = self.initial_sky if self.std_redux else \ self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], skymask=self.skymask, maskslits=self.maskslits, show=self.show) self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs = \ self.redux.local_skysub_extract(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], self.caliBrate.mswave, self.global_sky, self.rn2img, self.sobjs_obj, model_noise=(not self.ir_redux),std = self.std_redux, maskslits=self.maskslits, show_profile=self.show,show=self.show) # Purge out the negative objects if this was a near-IR reduction. # TODO should we move this purge call to local_skysub_extract?? if self.ir_redux: self.sobjs.purge_neg() # Flexure correction if this is not a standard star if not self.std_redux: self.redux.flexure_correct(self.sobjs, self.basename) # Grab coord radec = ltu.radec_to_coord((self.fitstbl["ra"][frames[0]], self.fitstbl["dec"][frames[0]])) self.vel_corr = self.redux.helio_correct(self.sobjs, radec, self.obstime) else: # Print status message msgs_string = 'No objects to extract for target {:s}'.format(self.fitstbl['target'][frames[0]]) + msgs.newline() msgs_string += 'On frames:' + msgs.newline() for iframe in frames: msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline() msgs.warn(msgs_string) # set to first pass global sky self.skymodel = self.initial_sky self.objmodel = np.zeros_like(self.sciimg) # Set to sciivar. Could create a model but what is the point? self.ivarmodel = np.copy(self.sciivar) # Set to the initial mask in case no objects were found self.outmask = self.redux.mask # empty specobjs object from object finding if self.ir_redux: self.sobjs_obj.purge_neg() self.sobjs = self.sobjs_obj self.vel_corr = None return self.sciimg, self.sciivar, self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs, self.vel_corr # TODO: Why not use self.frame? def save_exposure(self, frame, sci_dict, basename): """ Save the outputs from extraction for a given exposure Args: frame (:obj:`int`): 0-indexed row in the metadata table with the frame that has been reduced. sci_dict (:obj:`dict`): Dictionary containing the primary outputs of extraction basename (:obj:`str`): The root name for the output file. 
Returns: None or SpecObjs: All of the objects saved to disk """ # TODO: Need some checks here that the exposure has been reduced # Determine the headers head1d = self.fitstbl[frame] # Need raw file header information rawfile = self.fitstbl.frame_paths(frame) head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext,) refframe = 'pixel' if self.caliBrate.par['wavelengths']['reference'] == 'pixel' else \ self.caliBrate.par['wavelengths']['frame'] # Determine the paths/filenames scipath = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir']) save.save_all(sci_dict, self.caliBrate.master_key_dict, self.caliBrate.master_dir, self.spectrograph, head1d, head2d, scipath, basename, refframe=refframe, update_det=self.par['rdx']['detnum'], binning=self.fitstbl['binning'][frame]) return def msgs_reset(self): """ Reset the msgs object """ # Reset the global logger msgs.reset(log=self.logname, verbosity=self.verbosity) msgs.pypeit_file = self.pypeit_file def print_end_time(self): """ Print the elapsed time """ # Capture the end time and print it to user tend = time.time() codetime = tend-self.tstart if codetime < 60.0: msgs.info('Execution time: {0:.2f}s'.format(codetime)) elif codetime/60.0 < 60.0: mns = int(codetime/60.0) scs = codetime - 60.0*mns msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs)) else: hrs = int(codetime/3600.0) mns = int(60.0*(codetime/3600.0 - hrs)) scs = codetime - 60.0*mns - 3600.0*hrs msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format(hrs, mns, scs)) # TODO: Move this to fitstbl? def show_science(self): """ Simple print of science frames """ indx = self.fitstbl.find_frames('science') print(self.fitstbl[['target','ra','dec','exptime','dispname']][indx]) def __repr__(self): # Generate sets string return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
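# ----------------------------------------------------------------------------
# Minimal usage sketch for the class above (illustrative only): it assumes
# 'my_reduction.pypeit' is just a placeholder for a valid PypeIt file and that
# the module-level imports used by the class are available.
def _example_reduce(pypeit_file='my_reduction.pypeit'):
    """Run the full calibration + extraction loop and build the QA pages."""
    pype = PypeIt(pypeit_file, verbosity=2, overwrite=True, reuse_masters=True, show=False)
    pype.reduce_all()   # calibrate and extract all standard and science frames
    pype.build_qa()     # wrap the QA output in html pages
    return pype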
class PypeIt: """ This class runs the primary calibration and extraction in PypeIt .. todo:: Fill in list of attributes! Args: pypeit_file (:obj:`str`): PypeIt filename. verbosity (:obj:`int`, optional): Verbosity level of system output. Can be: - 0: No output - 1: Minimal output (default) - 2: All output overwrite (:obj:`bool`, optional): Flag to overwrite any existing files/directories. reuse_masters (:obj:`bool`, optional): Reuse any pre-existing calibration files logname (:obj:`str`, optional): The name of an ascii log file with the details of the reduction. show: (:obj:`bool`, optional): Show reduction steps via plots (which will block further execution until clicked on) and outputs to ginga. Requires remote control ginga session via ``ginga --modules=RC,SlitWavelength &`` redux_path (:obj:`str`, optional): Over-ride reduction path in PypeIt file (e.g. Notebook usage) calib_only: (:obj:`bool`, optional): Only generate the calibration files that you can Attributes: pypeit_file (:obj:`str`): Name of the pypeit file to read. PypeIt files have a specific set of valid formats. A description can be found :ref:`pypeit_file`. fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info """ def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None, show=False, redux_path=None, calib_only=False): # Set up logging self.logname = logname self.verbosity = verbosity self.pypeit_file = pypeit_file self.msgs_reset() # Load cfg_lines, data_files, frametype, usrdata, setups, _ \ = parse_pypeit_file(pypeit_file, runtime=True) self.calib_only = calib_only # Spectrograph cfg = ConfigObj(cfg_lines) spectrograph_name = cfg['rdx']['spectrograph'] self.spectrograph = load_spectrograph(spectrograph_name) msgs.info('Loaded spectrograph {0}'.format(self.spectrograph.name)) # -------------------------------------------------------------- # Get the full set of PypeIt parameters # - Grab a science or standard file for configuration specific parameters config_specific_file = None for idx, row in enumerate(usrdata): if ('science' in row['frametype']) or ('standard' in row['frametype']): config_specific_file = data_files[idx] # search for arcs, trace if no scistd was there if config_specific_file is None: for idx, row in enumerate(usrdata): if ('arc' in row['frametype']) or ('trace' in row['frametype']): config_specific_file = data_files[idx] if config_specific_file is not None: msgs.info( 'Setting configuration-specific parameters using {0}'.format( os.path.split(config_specific_file)[1])) spectrograph_cfg_lines = self.spectrograph.config_specific_par( config_specific_file).to_config() # - Build the full set, merging with any user-provided # parameters self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines) msgs.info('Built full PypeIt parameter set.') # Check the output paths are ready if redux_path is not None: self.par['rdx']['redux_path'] = redux_path # TODO: Write the full parameter set here? 
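        # The merge above applies the parameter precedence: the spectrograph
        # defaults adjusted for this configuration (config_specific_par) form
        # the base, and any values set in the user's PypeIt file (cfg_lines)
        # are merged on top of them (see PypeItPar.from_cfg_lines for the
        # exact merge semantics).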
# -------------------------------------------------------------- # -------------------------------------------------------------- # Build the meta data # - Re-initilize based on the file data msgs.info('Compiling metadata') self.fitstbl = PypeItMetaData(self.spectrograph, self.par, files=data_files, usrdata=usrdata, strict=True) # - Interpret automated or user-provided data from the PypeIt # file self.fitstbl.finalize_usr_build(frametype, setups[0]) # -------------------------------------------------------------- # - Write .calib file (For QA naming amongst other things) calib_file = pypeit_file.replace('.pypeit', '.calib') self.fitstbl.write_calib(calib_file) # Other Internals self.overwrite = overwrite # Currently the runtime argument determines the behavior for # reuse_masters. self.reuse_masters = reuse_masters self.show = show # Set paths self.calibrations_path = os.path.join( self.par['rdx']['redux_path'], self.par['calibrations']['master_dir']) # Check for calibrations if not self.calib_only: calibrations.check_for_calibs( self.par, self.fitstbl, raise_error=self.par['calibrations']['raise_chk_error']) # Report paths msgs.info('Setting reduction path to {0}'.format( self.par['rdx']['redux_path'])) msgs.info('Master calibration data output to: {0}'.format( self.calibrations_path)) msgs.info('Science data output to: {0}'.format(self.science_path)) msgs.info('Quality assessment plots output to: {0}'.format( self.qa_path)) # TODO: Is anything written to the qa dir or only to qa/PNGs? # Should we have separate calibration and science QA # directories? # An html file wrapping them all too # Init # TODO: I don't think this ever used self.det = None self.tstart = None self.basename = None self.sciI = None self.obstime = None @property def science_path(self): """Return the path to the science directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir']) @property def qa_path(self): """Return the path to the top-level QA directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['qadir']) def build_qa(self): """ Generate QA wrappers """ msgs.qa_path = self.qa_path qa.gen_qa_dir(self.qa_path) qa.gen_mf_html(self.pypeit_file, self.qa_path) qa.gen_exp_html() # TODO: This should go in a more relevant place def spec_output_file(self, frame, twod=False): """ Return the path to the spectral output data file. Args: frame (:obj:`int`): Frame index from :attr:`fitstbl`. twod (:obj:`bool`): Name for the 2D output file; 1D file otherwise. Returns: :obj:`str`: The path for the output file """ return os.path.join( self.science_path, 'spec{0}d_{1}.fits'.format('2' if twod else '1', self.fitstbl.construct_basename(frame))) def outfile_exists(self, frame): """ Check whether the 2D outfile of a given frame already exists Args: frame (int): Frame index from fitstbl Returns: bool: True if the 2d file exists, False if it does not exist """ return os.path.isfile(self.spec_output_file(frame, twod=True)) def get_std_outfile(self, standard_frames): """ Grab the output filename from an input list of standard_frame indices If more than one index is provided, the first is taken Args: standard_frames (list): List of indices corresponding to standard stars Returns: str: Full path to the standard spec1d output file """ # TODO: Need to decide how to associate standards with # science frames in the case where there is more than one # standard associated with a given science frame. 
Below, I # just use the first standard std_outfile = None std_frame = None if len(standard_frames) == 0 else standard_frames[0] # Prepare to load up standard? if std_frame is not None: std_outfile = self.spec_output_file(std_frame) \ if isinstance(std_frame, (int,np.integer)) else None if std_outfile is not None and not os.path.isfile(std_outfile): msgs.error('Could not find standard file: {0}'.format(std_outfile)) return std_outfile def calib_all(self, run=True): """ Create calibrations for all setups This will not crash if not all of the standard set of files are not provided Args: run (bool, optional): If False, only print the calib names and do not actually run. Only used with the pypeit_parse_calib_id script Returns: dict: A simple dict summarizing the calibration names """ calib_dict = {} self.tstart = time.time() # Frame indices frame_indx = np.arange(len(self.fitstbl)) for i in range(self.fitstbl.n_calib_groups): # 1-indexed calib number calib_grp = str(i + 1) # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) grp_frames = frame_indx[in_grp] # Find the detectors to reduce # detectors = PypeIt.select_detectors(detnum=self.par['rdx']['detnum'], # ndet=self.spectrograph.ndet) detectors = self.spectrograph.select_detectors( subset=self.par['rdx']['detnum']) calib_dict[calib_grp] = {} # Loop on Detectors for self.det in detectors: # Instantiate Calibrations class self.caliBrate = calibrations.Calibrations.get_instance( self.fitstbl, self.par['calibrations'], self.spectrograph, self.calibrations_path, qadir=self.qa_path, reuse_masters=self.reuse_masters, show=self.show, user_slits=slittrace.merge_user_slit( self.par['rdx']['slitspatnum'], self.par['rdx']['maskIDs'])) # Do it # TODO: Why isn't set_config part of the Calibrations.__init__ method? self.caliBrate.set_config(grp_frames[0], self.det, self.par['calibrations']) # Allow skipping the run (e.g. 
parse_calib_id.py script) if run: self.caliBrate.run_the_steps() key = self.caliBrate.master_key_dict['frame'] calib_dict[calib_grp][key] = {} for step in self.caliBrate.steps: if step in ['bpm', 'slits', 'wv_calib', 'tilts', 'flats']: continue elif step == 'tiltimg': # Annoying kludge step = 'tilt' # Prep raw_files, self.caliBrate.master_key_dict[ step] = self.caliBrate._prep_calibrations(step) masterframe_name = masterframe.construct_file_name( buildimage.frame_image_classes[step], self.caliBrate.master_key_dict[step], master_dir=self.caliBrate.master_dir) # Add to dict if len(raw_files) > 0: calib_dict[calib_grp][key][step] = {} calib_dict[calib_grp][key][step][ 'master_key'] = self.caliBrate.master_key_dict[ step] calib_dict[calib_grp][key][step][ 'master_name'] = os.path.basename(masterframe_name) calib_dict[calib_grp][key][step]['raw_files'] = [ os.path.basename(ifile) for ifile in raw_files ] # Print the results print(json.dumps(calib_dict, sort_keys=True, indent=4)) # Write msgs.info('Writing calib file') calib_file = self.pypeit_file.replace('.pypeit', '.calib_ids') ltu.savejson(calib_file, calib_dict, overwrite=True, easy_to_read=True) # Finish self.print_end_time() # Return return calib_dict def reduce_all(self): """ Main driver of the entire reduction Calibration and extraction via a series of calls to reduce_exposure() """ # Validate the parameter set self.par.validate_keys(required=[ 'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure' ]) self.tstart = time.time() # Find the standard frames is_standard = self.fitstbl.find_frames('standard') # Find the science frames is_science = self.fitstbl.find_frames('science') # Frame indices frame_indx = np.arange(len(self.fitstbl)) # Standard Star(s) Loop # Iterate over each calibration group and reduce the standards for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the standard frames in this calibration group: grp_standards = frame_indx[is_standard & in_grp] # Reduce all the standard frames, loop on unique comb_id u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards]) for j, comb_id in enumerate(u_combid_std): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: # Build history to document what contributd to the reduced # exposure history = History(self.fitstbl.frame_paths(frames[0])) history.add_reduce(i, self.fitstbl, frames, bg_frames) std_spec2d, std_sobjs = self.reduce_exposure( frames, bg_frames=bg_frames) # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], std_spec2d, std_sobjs, self.basename, history) else: msgs.info( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. 
Set overwrite=True to recreate and overwrite.') # Science Frame(s) Loop # Iterate over each calibration group again and reduce the science frames for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the science frames in this calibration group: grp_science = frame_indx[is_science & in_grp] # Associate standards (previously reduced above) for this setup std_outfile = self.get_std_outfile(frame_indx[is_standard]) # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration science_basename = [None] * len(grp_science) # Loop on unique comb_id u_combid = np.unique(self.fitstbl['comb_id'][grp_science]) for j, comb_id in enumerate(u_combid): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] # Find all frames whose comb_id matches the current frames bkg_id. bg_frames = np.where((self.fitstbl['comb_id'] == self.fitstbl['bkg_id'][frames][0]) & (self.fitstbl['comb_id'] >= 0))[0] # JFH changed the syntax below to that above, which allows frames to be used more than once # as a background image. The syntax below would require that we could somehow list multiple # numbers for the bkg_id which is impossible without a comma separated list # bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: # Build history to document what contributd to the reduced # exposure history = History(self.fitstbl.frame_paths(frames[0])) history.add_reduce(i, self.fitstbl, frames, bg_frames) # TODO -- Should we reset/regenerate self.slits.mask for a new exposure sci_spec2d, sci_sobjs = self.reduce_exposure( frames, bg_frames=bg_frames, std_outfile=std_outfile) science_basename[j] = self.basename # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], sci_spec2d, sci_sobjs, self.basename, history) else: msgs.warn( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') msgs.info('Finished calibration group {0}'.format(i)) # Finish self.print_end_time() def reduce_exposure(self, frames, bg_frames=None, std_outfile=None): """ Reduce a single exposure Args: frames (:obj:`list`): List of 0-indexed rows in :attr:`fitstbl` with the frames to reduce. bg_frames (:obj:`list`, optional): List of frame indices for the background. std_outfile (:obj:`str`, optional): File with a previously reduced standard spectrum from PypeIt. Returns: dict: The dictionary containing the primary outputs of extraction. """ # if show is set, clear the ginga channels at the start of each new sci_ID if self.show: # TODO: Put this in a try/except block? display.clear_all(allow_new=True) has_bg = True if bg_frames is not None and len( bg_frames) > 0 else False # Is this an b/g subtraction reduction? if has_bg: self.bkg_redux = True # The default is to find_negative objects if the bg_frames are classified as "science", and to not find_negative # objects if the bg_frames are classified as "sky". This can be explicitly overridden if # par['reduce']['findobj']['find_negative'] is set to something other than the default of None. 
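            # In short: background frames typed 'science' or 'standard' imply
            # difference imaging, so negative traces are searched for;
            # background frames typed 'sky' do not, unless the parameter above
            # overrides the default.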
self.find_negative = (('science' in self.fitstbl['frametype'][bg_frames[0]]) | ('standard' in self.fitstbl['frametype'][bg_frames[0]]))\ if self.par['reduce']['findobj']['find_negative'] is None else self.par['reduce']['findobj']['find_negative'] else: self.bkg_redux = False self.find_negative = False # Container for all the Spec2DObj all_spec2d = spec2dobj.AllSpec2DObj() all_spec2d['meta']['bkg_redux'] = self.bkg_redux all_spec2d['meta']['find_negative'] = self.find_negative # TODO -- Should we reset/regenerate self.slits.mask for a new exposure # container for specobjs during first loop (objfind) all_specobjs_objfind = specobjs.SpecObjs() # container for specobjs during second loop (extraction) all_specobjs_extract = specobjs.SpecObjs() # list of global_sky obtained during objfind and used in extraction initial_sky_list = [] # list of sciImg sciImg_list = [] # List of detectors with successful calibration calibrated_det = [] # list of successful MasterSlits calibrations to be used in the extraction loop calib_slits = [] # List of objFind objects objFind_list = [] # Print status message msgs_string = 'Reducing target {:s}'.format( self.fitstbl['target'][frames[0]]) + msgs.newline() # TODO: Print these when the frames are actually combined, # backgrounds are used, etc? msgs_string += 'Combining frames:' + msgs.newline() for iframe in frames: msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() msgs.info(msgs_string) if has_bg: bg_msgs_string = '' for iframe in bg_frames: bg_msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() bg_msgs_string = msgs.newline( ) + 'Using background from frames:' + msgs.newline( ) + bg_msgs_string msgs.info(bg_msgs_string) # Find the detectors to reduce subset = self.par['rdx']['slitspatnum'] if self.par['rdx']['slitspatnum'] is not None \ else self.par['rdx']['detnum'] detectors = self.spectrograph.select_detectors(subset=subset) msgs.info(f'Detectors to work on: {detectors}') # Loop on Detectors # TODO: Attempt to put in a multiprocessing call here? # objfind for self.det in detectors: msgs.info("Working on detector {0}".format(self.det)) # run calibration self.caliBrate = self.calib_one(frames, self.det) if not self.caliBrate.success: msgs.warn( f'Calibrations for detector {self.det} were unsuccessful! The step ' f'that failed was {self.caliBrate.failed_step}. 
Continuing by ' f'skipping this detector.') continue # we save only the detectors that had a successful calibration, # and we use only those in the extract loop below calibrated_det.append(self.det) # we also save the successful MasterSlits calibrations because they are used and modified # in the slitmask stuff in between the two loops calib_slits.append(self.caliBrate.slits) # global_sky, skymask and sciImg are needed in the extract loop initial_sky, sobjs_obj, sciImg, objFind = self.objfind_one( frames, self.det, bg_frames, std_outfile=std_outfile) if len(sobjs_obj) > 0: all_specobjs_objfind.add_sobj(sobjs_obj) initial_sky_list.append(initial_sky) sciImg_list.append(sciImg) objFind_list.append(objFind) # slitmask stuff if self.par['reduce']['slitmask']['assign_obj']: # get object positions from slitmask design and slitmask offsets for all the detectors spat_flexure = np.array([ss.spat_flexure for ss in sciImg_list]) # Grab platescale with binning bin_spec, bin_spat = parse.parse_binning(self.binning) platescale = np.array( [ss.detector.platescale * bin_spat for ss in sciImg_list]) # get the dither offset if available if self.par['reduce']['slitmask']['use_dither_offset']: dither = self.spectrograph.parse_dither_pattern( [self.fitstbl.frame_paths(frames[0])]) dither_off = dither[2][0] if dither is not None else None else: dither_off = None calib_slits = slittrace.get_maskdef_objpos_offset_alldets( all_specobjs_objfind, calib_slits, spat_flexure, platescale, self.par['calibrations']['slitedges']['det_buffer'], self.par['reduce']['slitmask'], dither_off=dither_off) # determine if slitmask offsets exist and compute an average offsets over all the detectors calib_slits = slittrace.average_maskdef_offset( calib_slits, platescale[0], self.spectrograph.list_detectors( mosaic='MSC' in calib_slits[0].detname)) # slitmask design matching and add undetected objects all_specobjs_objfind = slittrace.assign_addobjs_alldets( all_specobjs_objfind, calib_slits, spat_flexure, platescale, self.par['reduce']['slitmask'], self.par['reduce']['findobj']['find_fwhm']) # Extract for i, self.det in enumerate(calibrated_det): # re-run (i.e., load) calibrations self.caliBrate = self.calib_one(frames, self.det) self.caliBrate.slits = calib_slits[i] detname = sciImg_list[i].detector.name # TODO: pass back the background frame, pass in background # files as an argument. extract one takes a file list as an # argument and instantiates science within if all_specobjs_objfind.nobj > 0: all_specobjs_on_det = all_specobjs_objfind[ all_specobjs_objfind.DET == detname] else: all_specobjs_on_det = all_specobjs_objfind # Extract all_spec2d[detname], tmp_sobjs \ = self.extract_one(frames, self.det, sciImg_list[i], objFind_list[i], initial_sky_list[i], all_specobjs_on_det) # Hold em if tmp_sobjs.nobj > 0: all_specobjs_extract.add_sobj(tmp_sobjs) # JFH TODO write out the background frame? # TODO -- Save here? Seems like we should. 
Would probably need to use update_det=True # Return return all_spec2d, all_specobjs_extract def get_sci_metadata(self, frame, det): """ Grab the meta data for a given science frame and specific detector Args: frame (int): Frame index det (int): Detector index Returns: 5 objects are returned:: - str: Object type; science or standard - str: Setup string from master_key() - astropy.time.Time: Time of observation - str: Basename of the frame - str: Binning of the detector """ # Set binning, obstime, basename, and objtype binning = self.fitstbl['binning'][frame] obstime = self.fitstbl.construct_obstime(frame) basename = self.fitstbl.construct_basename(frame, obstime=obstime) objtype = self.fitstbl['frametype'][frame] if 'science' in objtype: objtype_out = 'science' elif 'standard' in objtype: objtype_out = 'standard' else: msgs.error('Unrecognized objtype') setup = self.fitstbl.master_key(frame, det=det) return objtype_out, setup, obstime, basename, binning def calib_one(self, frames, det): """ Run Calibration for a single exposure/detector pair Args: frames (:obj:`list`): List of frames to extract; stacked if more than one is provided det (:obj:`int`): Detector number (1-indexed) Returns: caliBrate (:class:`pypeit.calibrations.Calibrations`) """ msgs.info("Working on detector {0}".format(det)) # Instantiate Calibrations class caliBrate = calibrations.Calibrations.get_instance( self.fitstbl, self.par['calibrations'], self.spectrograph, self.calibrations_path, qadir=self.qa_path, reuse_masters=self.reuse_masters, show=self.show, user_slits=slittrace.merge_user_slit( self.par['rdx']['slitspatnum'], self.par['rdx']['maskIDs'])) #slitspat_num=self.par['rdx']['slitspatnum']) # These need to be separate to accomodate COADD2D caliBrate.set_config(frames[0], det, self.par['calibrations']) caliBrate.run_the_steps() return caliBrate def objfind_one(self, frames, det, bg_frames, std_outfile=None): """ Reduce + Find Objects in a single exposure/detector pair sci_ID and det need to have been set internally prior to calling this method Parameters ---------- frames : :obj:`list` List of frames to extract; stacked if more than one is provided det : :obj:`int` Detector number (1-indexed) bg_frames : :obj:`list` List of frames to use as the background. Can be empty. std_outfile : :obj:`str`, optional Filename for the standard star spec1d file. Passed directly to :func:`get_std_trace`. Returns ------- global_sky : `numpy.ndarray`_ Initial global sky model sobjs_obj : :class:`~pypeit.specobjs.SpecObjs` List of objects found sciImg : :class:`~pypeit.images.pypeitimage.PypeItImage` Science image objFind : :class:`~pypeit.find_objects.FindObjects` Object finding speobject """ # Grab some meta-data needed for the reduction from the fitstbl self.objtype, self.setup, self.obstime, self.basename, self.binning \ = self.get_sci_metadata(frames[0], det) msgs.info("Object finding begins for {} on det={}".format( self.basename, det)) # Is this a standard star? 
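        # Standards are processed with the 'standardframe' parameters below and
        # skip the standard-star trace crutch; science frames can instead reuse
        # the trace from a previously reduced standard (std_outfile) to help
        # locate and trace objects.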
self.std_redux = 'standard' in self.objtype frame_par = self.par['calibrations'][ 'standardframe'] if self.std_redux else self.par['scienceframe'] # Get the standard trace if need be if self.std_redux is False and std_outfile is not None: std_trace = specobjs.get_std_trace( self.spectrograph.get_det_name(det), std_outfile) else: std_trace = None # Build Science image sci_files = self.fitstbl.frame_paths(frames) sciImg = buildimage.buildimage_fromlist( self.spectrograph, det, frame_par, sci_files, bias=self.caliBrate.msbias, bpm=self.caliBrate.msbpm, dark=self.caliBrate.msdark, flatimages=self.caliBrate.flatimages, slits=self.caliBrate.slits, # For flexure correction ignore_saturation=False) # Background Image? if len(bg_frames) > 0: bg_file_list = self.fitstbl.frame_paths(bg_frames) sciImg = sciImg.sub( buildimage.buildimage_fromlist( self.spectrograph, det, frame_par, bg_file_list, bpm=self.caliBrate.msbpm, bias=self.caliBrate.msbias, dark=self.caliBrate.msdark, flatimages=self.caliBrate.flatimages, slits=self.caliBrate.slits, # For flexure correction ignore_saturation=False), frame_par['process']) # Deal with manual extraction row = self.fitstbl[frames[0]] manual_obj = ManualExtractionObj.by_fitstbl_input( row['filename'], row['manual'], self.spectrograph) if len(row['manual'].strip()) > 0 else None # Instantiate Reduce object # Required for pypeline specific object # At instantiaton, the fullmask in self.sciImg is modified objFind = find_objects.FindObjects.get_instance( sciImg, self.spectrograph, self.par, self.caliBrate, self.objtype, bkg_redux=self.bkg_redux, manual=manual_obj, find_negative=self.find_negative, std_redux=self.std_redux, show=self.show, basename=self.basename) # Do it initial_sky, sobjs_obj = objFind.run(std_trace=std_trace, show_peaks=self.show) # Return return initial_sky, sobjs_obj, sciImg, objFind def extract_one(self, frames, det, sciImg, objFind, initial_sky, sobjs_obj): """ Extract Objects in a single exposure/detector pair sci_ID and det need to have been set internally prior to calling this method Args: frames (:obj:`list`): List of frames to extract; stacked if more than one is provided det (:obj:`int`): Detector number (1-indexed) sciImg (:class:`PypeItImage`): Data container that holds a single image from a single detector its related images (e.g. ivar, mask) objFind : :class:`~pypeit.find_objects.FindObjects` Object finding object initial_sky (`numpy.ndarray`_): Initial global sky model sobjs_obj (:class:`pypeit.specobjs.SpecObjs`): List of objects found during `run_objfind` Returns: tuple: Returns six `numpy.ndarray`_ objects and a :class:`pypeit.specobjs.SpecObjs` object with the extracted spectra from this exposure/detector pair. The six `numpy.ndarray`_ objects are (1) the science image, (2) its inverse variance, (3) the sky model, (4) the object model, (5) the model inverse variance, and (6) the mask. """ # Grab some meta-data needed for the reduction from the fitstbl self.objtype, self.setup, self.obstime, self.basename, self.binning \ = self.get_sci_metadata(frames[0], det) # Is this a standard star? 
self.std_redux = 'standard' in self.objtype # Update the skymask skymask = objFind.create_skymask(sobjs_obj) # Update the global sky if 'standard' in self.fitstbl['frametype'][frames[0]] or \ self.par['reduce']['findobj']['skip_final_global'] or \ self.par['reduce']['skysub']['load_mask'] or \ self.par['reduce']['skysub']['user_regions'] is not None: final_global_sky = initial_sky else: final_global_sky = objFind.global_skysub(previous_sky=initial_sky, skymask=skymask, show=self.show) scaleImg = objFind.scaleimg # update here slits.mask since global_skysub modify reduce_bpm and we need to propagate it into extraction flagged_slits = np.where(objFind.reduce_bpm)[0] if len(flagged_slits) > 0: self.caliBrate.slits.mask[flagged_slits] = \ self.caliBrate.slits.bitmask.turn_on(self.caliBrate.slits.mask[flagged_slits], 'BADREDUCE') msgs.info("Extraction begins for {} on det={}".format( self.basename, det)) # Instantiate Reduce object # Required for pypeline specific object # At instantiaton, the fullmask in self.sciImg is modified # TODO Are we repeating steps in the init for FindObjects and Extract?? self.exTract = extraction.Extract.get_instance( sciImg, sobjs_obj, self.spectrograph, self.par, self.caliBrate, self.objtype, bkg_redux=self.bkg_redux, return_negative=self.par['reduce']['extraction'] ['return_negative'], std_redux=self.std_redux, show=self.show, basename=self.basename) if not self.par['reduce']['extraction']['skip_extraction']: skymodel, objmodel, ivarmodel, outmask, sobjs, waveImg, \ tilts = self.exTract.run(final_global_sky, ra=self.fitstbl["ra"][frames[0]], dec=self.fitstbl["dec"][frames[0]], obstime=self.obstime) else: # Although exrtaction is not performed, still need to prepare some masks and the tilts self.exTract.prepare_extraction() # Since the extraction was not performed, fill the arrays with the best available information skymodel = final_global_sky objmodel = np.zeros_like(self.exTract.sciImg.image) ivarmodel = np.copy(self.exTract.sciImg.ivar) outmask = self.exTract.sciImg.fullmask waveImg = self.exTract.waveimg tilts = self.exTract.tilts sobjs = sobjs_obj # TODO -- Do this upstream # Tack on detector and wavelength RMS for sobj in sobjs: sobj.DETECTOR = sciImg.detector iwv = np.where( self.caliBrate.wv_calib.spat_ids == sobj.SLITID)[0][0] sobj.WAVE_RMS = self.caliBrate.wv_calib.wv_fits[iwv].rms # Construct table of spectral flexure spec_flex_table = Table() spec_flex_table['spat_id'] = self.caliBrate.slits.spat_id spec_flex_table['sci_spec_flexure'] = self.exTract.slitshift # pull out maskdef_designtab from caliBrate.slits maskdef_designtab = self.caliBrate.slits.maskdef_designtab slits = copy.deepcopy(self.caliBrate.slits) slits.maskdef_designtab = None # Construct the Spec2DObj spec2DObj = spec2dobj.Spec2DObj( sciimg=sciImg.image, ivarraw=sciImg.ivar, skymodel=skymodel, objmodel=objmodel, ivarmodel=ivarmodel, scaleimg=scaleImg, waveimg=waveImg, bpmmask=outmask, detector=sciImg.detector, sci_spat_flexure=sciImg.spat_flexure, sci_spec_flexure=spec_flex_table, vel_corr=self.exTract.vel_corr, vel_type=self.par['calibrations']['wavelengths']['refframe'], tilts=tilts, slits=slits, maskdef_designtab=maskdef_designtab) spec2DObj.process_steps = sciImg.process_steps # QA spec2DObj.gen_qa() # Return return spec2DObj, sobjs def save_exposure(self, frame, all_spec2d, all_specobjs, basename, history=None): """ Save the outputs from extraction for a given exposure Args: frame (:obj:`int`): 0-indexed row in the metadata table with the frame that has been reduced. 
all_spec2d(:class:`pypeit.spec2dobj.AllSpec2DObj`): sci_dict (:obj:`dict`): Dictionary containing the primary outputs of extraction basename (:obj:`str`): The root name for the output file. history (:obj:`pypeit.history.History`): History entries to be added to fits header Returns: None or SpecObjs: All of the objects saved to disk """ # TODO: Need some checks here that the exposure has been reduced? # Determine the headers row_fitstbl = self.fitstbl[frame] # Need raw file header information rawfile = self.fitstbl.frame_paths(frame) head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext) # Check for the directory if not os.path.isdir(self.science_path): os.makedirs(self.science_path) # NOTE: There are some gymnastics here to keep from altering # self.par['rdx']['detnum']. I.e., I can't just set update_det = # self.par['rdx']['detnum'] because that can alter the latter if I don't # deepcopy it... if self.par['rdx']['detnum'] is None: update_det = None elif isinstance(self.par['rdx']['detnum'], list): update_det = [ self.spectrograph.allowed_mosaics.index(d) + 1 if isinstance(d, tuple) else d for d in self.par['rdx']['detnum'] ] else: update_det = self.par['rdx']['detnum'] subheader = self.spectrograph.subheader_for_spec(row_fitstbl, head2d) # 1D spectra if all_specobjs.nobj > 0: # Spectra outfile1d = os.path.join(self.science_path, 'spec1d_{:s}.fits'.format(basename)) # TODO #embed(header='deal with the following for maskIDs; 713 of pypeit') all_specobjs.write_to_fits( subheader, outfile1d, update_det=update_det, slitspatnum=self.par['rdx']['slitspatnum'], history=history) # Info outfiletxt = os.path.join(self.science_path, 'spec1d_{:s}.txt'.format(basename)) # TODO: Note we re-read in the specobjs from disk to deal with situations where # only a single detector is run in a second pass but in the same reduction directory. # Thiw was to address Issue #1116 in PR #1154. Slightly inefficient, but only other # option is to re-work write_info to also "append" sobjs = specobjs.SpecObjs.from_fitsfile(outfile1d, chk_version=False) sobjs.write_info(outfiletxt, self.spectrograph.pypeline) #all_specobjs.write_info(outfiletxt, self.spectrograph.pypeline) # 2D spectra outfile2d = os.path.join(self.science_path, 'spec2d_{:s}.fits'.format(basename)) # Build header pri_hdr = all_spec2d.build_primary_hdr( head2d, self.spectrograph, redux_path=self.par['rdx']['redux_path'], master_key_dict=self.caliBrate.master_key_dict, master_dir=self.caliBrate.master_dir, subheader=subheader, history=history) # Write all_spec2d.write_to_fits(outfile2d, pri_hdr=pri_hdr, update_det=update_det, slitspatnum=self.par['rdx']['slitspatnum']) def msgs_reset(self): """ Reset the msgs object """ # Reset the global logger msgs.reset(log=self.logname, verbosity=self.verbosity) msgs.pypeit_file = self.pypeit_file def print_end_time(self): """ Print the elapsed time """ # Capture the end time and print it to user msgs.info(utils.get_time_string(time.time() - self.tstart)) # TODO: Move this to fitstbl? def show_science(self): """ Simple print of science frames """ indx = self.fitstbl.find_frames('science') print(self.fitstbl[['target', 'ra', 'dec', 'exptime', 'dispname']][indx]) def __repr__(self): # Generate sets string return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
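# ----------------------------------------------------------------------------
# Minimal calibration-only sketch for the class above (illustrative only):
# 'my_reduction.pypeit' is again just a placeholder for a valid PypeIt file.
def _example_calib_only(pypeit_file='my_reduction.pypeit'):
    """Build calibrations for every setup and return the summary dictionary."""
    pype = PypeIt(pypeit_file, calib_only=True, reuse_masters=True)
    # run=False would only report the calibration names, as used by the
    # pypeit_parse_calib_id script
    return pype.calib_all(run=True)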
class PypeIt(object): """ This class runs the primary calibration and extraction in PypeIt .. todo:: Fill in list of attributes! Args: pypeit_file (:obj:`str`): PypeIt filename. verbosity (:obj:`int`, optional): Verbosity level of system output. Can be: - 0: No output - 1: Minimal output (default) - 2: All output overwrite (:obj:`bool`, optional): Flag to overwrite any existing files/directories. reuse_masters (:obj:`bool`, optional): Reuse any pre-existing calibration files logname (:obj:`str`, optional): The name of an ascii log file with the details of the reduction. show: (:obj:`bool`, optional): Show reduction steps via plots (which will block further execution until clicked on) and outputs to ginga. Requires remote control ginga session via ``ginga --modules=RC &`` redux_path (:obj:`str`, optional): Over-ride reduction path in PypeIt file (e.g. Notebook usage) Attributes: pypeit_file (:obj:`str`): Name of the pypeit file to read. PypeIt files have a specific set of valid formats. A description can be found :ref:`pypeit_file`. fitstbl (:obj:`pypit.metadata.PypeItMetaData`): holds the meta info """ # __metaclass__ = ABCMeta def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None, show=False, redux_path=None): # Load cfg_lines, data_files, frametype, usrdata, setups \ = parse_pypeit_file(pypeit_file, runtime=True) self.pypeit_file = pypeit_file # Spectrograph cfg = ConfigObj(cfg_lines) spectrograph_name = cfg['rdx']['spectrograph'] self.spectrograph = load_spectrograph(spectrograph_name, ifile=data_files[0]) msgs.info('Loaded spectrograph {0}'.format( self.spectrograph.spectrograph)) # -------------------------------------------------------------- # Get the full set of PypeIt parameters # - Grab a science or standard file for configuration specific parameters scistd_file = None for idx, row in enumerate(usrdata): if ('science' in row['frametype']) or ('standard' in row['frametype']): scistd_file = data_files[idx] break # - Configuration specific parameters for the spectrograph if scistd_file is not None: msgs.info( 'Setting configuration-specific parameters using {0}'.format( os.path.split(scistd_file)[1])) spectrograph_cfg_lines = self.spectrograph.config_specific_par( scistd_file).to_config() # - Build the full set, merging with any user-provided # parameters self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines) msgs.info('Built full PypeIt parameter set.') # Check the output paths are ready if redux_path is not None: self.par['rdx']['redux_path'] = redux_path # TODO: Write the full parameter set here? # -------------------------------------------------------------- # -------------------------------------------------------------- # Build the meta data # - Re-initilize based on the file data msgs.info('Compiling metadata') self.fitstbl = PypeItMetaData(self.spectrograph, self.par, files=data_files, usrdata=usrdata, strict=True) # - Interpret automated or user-provided data from the PypeIt # file self.fitstbl.finalize_usr_build(frametype, setups[0]) # -------------------------------------------------------------- # - Write .calib file (For QA naming amongst other things) calib_file = pypeit_file.replace('.pypeit', '.calib') self.fitstbl.write_calib(calib_file) # Other Internals self.logname = logname self.overwrite = overwrite # Currently the runtime argument determines the behavior for # reuse_masters. 
self.reuse_masters = reuse_masters self.show = show # Set paths if self.par['calibrations']['caldir'] == 'default': self.calibrations_path = os.path.join( self.par['rdx']['redux_path'], 'Masters') else: self.calibrations_path = self.par['calibrations']['caldir'] # Report paths msgs.info('Setting reduction path to {0}'.format( self.par['rdx']['redux_path'])) msgs.info('Master calibration data output to: {0}'.format( self.calibrations_path)) msgs.info('Science data output to: {0}'.format(self.science_path)) msgs.info('Quality assessment plots output to: {0}'.format( self.qa_path)) # TODO: Is anything written to the qa dir or only to qa/PNGs? # Should we have separate calibration and science QA # directories? # Instantiate Calibrations class self.caliBrate \ = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], self.spectrograph, caldir=self.calibrations_path, qadir=self.qa_path, reuse_masters=self.reuse_masters, show=self.show) # Init self.verbosity = verbosity # TODO: I don't think this ever used self.frame = None self.det = None self.tstart = None self.basename = None self.sciI = None self.obstime = None @property def science_path(self): """Return the path to the science directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir']) @property def qa_path(self): """Return the path to the top-level QA directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['qadir']) def build_qa(self): """ Generate QA wrappers """ qa.gen_mf_html(self.pypeit_file, self.qa_path) qa.gen_exp_html() # TODO: This should go in a more relevant place def spec_output_file(self, frame, twod=False): """ Return the path to the spectral output data file. Args: frame (:obj:`int`): Frame index from :attr:`fitstbl`. twod (:obj:`bool`): Name for the 2D output file; 1D file otherwise. Returns: :obj:`str`: The path for the output file """ return os.path.join( self.science_path, 'spec{0}d_{1}.fits'.format('2' if twod else '1', self.fitstbl.construct_basename(frame))) def outfile_exists(self, frame): """ Check whether the 2D outfile of a given frame already exists Args: frame (int): Frame index from fitstbl Returns: bool: True if the 2d file exists, False if it does not exist """ return os.path.isfile(self.spec_output_file(frame, twod=True)) def get_std_outfile(self, standard_frames): """ Grab the output filename from an input list of standard_frame indices If more than one index is provided, the first is taken Args: standard_frames (list): List of indices corresponding to standard stars Returns: str: Full path to the standard spec1d output file """ # TODO: Need to decide how to associate standards with # science frames in the case where there is more than one # standard associated with a given science frame. Below, I # just use the first standard std_outfile = None std_frame = None if len(standard_frames) == 0 else standard_frames[0] # Prepare to load up standard? 
if std_frame is not None: std_outfile = self.spec_output_file(std_frame) \ if isinstance(std_frame, (int,np.integer)) else None if std_outfile is not None and not os.path.isfile(std_outfile): msgs.error('Could not find standard file: {0}'.format(std_outfile)) return std_outfile def reduce_all(self): """ Main driver of the entire reduction Calibration and extraction via a series of calls to reduce_exposure() """ # Validate the parameter set required = [ 'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure', 'fluxcalib' ] can_be_None = ['flexure', 'fluxcalib'] self.par.validate_keys(required=required, can_be_None=can_be_None) self.tstart = time.time() # Find the standard frames is_standard = self.fitstbl.find_frames('standard') # Find the science frames is_science = self.fitstbl.find_frames('science') # Frame indices frame_indx = np.arange(len(self.fitstbl)) # Iterate over each calibration group and reduce the standards for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the standard frames in this calibration group: grp_standards = frame_indx[is_standard & in_grp] # Reduce all the standard frames, loop on unique comb_id u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards]) for j, comb_id in enumerate(u_combid_std): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: std_dict = self.reduce_exposure(frames, bg_frames=bg_frames) # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], std_dict, self.basename) else: msgs.info( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') # Iterate over each calibration group again and reduce the science frames for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the science frames in this calibration group: grp_science = frame_indx[is_science & in_grp] # Associate standards (previously reduced above) for this setup std_outfile = self.get_std_outfile(frame_indx[is_standard]) # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration science_basename = [None] * len(grp_science) # Loop on unique comb_id u_combid = np.unique(self.fitstbl['comb_id'][grp_science]) for j, comb_id in enumerate(u_combid): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] # Find all frames whose comb_id matches the current frames bkg_id. bg_frames = np.where((self.fitstbl['comb_id'] == self.fitstbl['bkg_id'][frames][0]) & (self.fitstbl['comb_id'] >= 0))[0] # JFH changed the syntax below to that above, which allows frames to be used more than once # as a background image. 
The syntax below would require that we could somehow list multiple # numbers for the bkg_id which is impossible without a comma separated list # bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: sci_dict = self.reduce_exposure(frames, bg_frames=bg_frames, std_outfile=std_outfile) science_basename[j] = self.basename # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], sci_dict, self.basename) else: msgs.warn( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') msgs.info('Finished calibration group {0}'.format(i)) # Finish self.print_end_time() # This is a static method to allow for use in coadding script @staticmethod def select_detectors(detnum=None, ndet=1): """ Return the 1-indexed list of detectors to reduce. Args: detnum (:obj:`int`, :obj:`list`, optional): One or more detectors to reduce. If None, return the full list for the provided number of detectors (`ndet`). ndet (:obj:`int`, optional): The number of detectors for this instrument. Only used if `detnum is None`. Returns: list: List of detectors to be reduced """ if detnum is None: return np.arange(1, ndet + 1).tolist() return [detnum] if isinstance(detnum, int) else detnum def reduce_exposure(self, frames, bg_frames=None, std_outfile=None): """ Reduce a single exposure Args: frame (:obj:`int`): 0-indexed row in :attr:`fitstbl` with the frame to reduce. bg_frames (:obj:`list`, optional): List of frame indices for the background. std_outfile (:obj:`str`, optional): File with a previously reduced standard spectrum from PypeIt. Returns: dict: The dictionary containing the primary outputs of extraction. """ # TODO: # - bg_frames should be None by default # - change doc string to reflect that more than one frame can be # provided # if show is set, clear the ginga channels at the start of each new sci_ID if self.show: # TODO: Put this in a try/except block? ginga.clear_all() has_bg = True if bg_frames is not None and len( bg_frames) > 0 else False # Is this an IR reduction? # TODO: Why specific to IR? self.ir_redux = True if has_bg else False # TODO: JFH Why does this need to be ordered? sci_dict = OrderedDict() # This needs to be ordered sci_dict['meta'] = {} sci_dict['meta']['ir_redux'] = self.ir_redux # Print status message msgs_string = 'Reducing target {:s}'.format( self.fitstbl['target'][frames[0]]) + msgs.newline() # TODO: Print these when the frames are actually combined, # backgrounds are used, etc? 
msgs_string += 'Combining frames:' + msgs.newline() for iframe in frames: msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() msgs.info(msgs_string) if has_bg: bg_msgs_string = '' for iframe in bg_frames: bg_msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() bg_msgs_string = msgs.newline( ) + 'Using background from frames:' + msgs.newline( ) + bg_msgs_string msgs.info(bg_msgs_string) # Find the detectors to reduce detectors = PypeIt.select_detectors(detnum=self.par['rdx']['detnum'], ndet=self.spectrograph.ndet) if len(detectors) != self.spectrograph.ndet: msgs.warn('Not reducing detectors: {0}'.format(' '.join([ str(d) for d in set(np.arange(self.spectrograph.ndet)) - set(detectors) ]))) # Loop on Detectors for self.det in detectors: msgs.info("Working on detector {0}".format(self.det)) sci_dict[self.det] = {} # Calibrate #TODO Is the right behavior to just use the first frame? self.caliBrate.set_config(frames[0], self.det, self.par['calibrations']) self.caliBrate.run_the_steps() # Extract # TODO: pass back the background frame, pass in background # files as an argument. extract one takes a file list as an # argument and instantiates science within sci_dict[self.det]['sciimg'], sci_dict[self.det]['sciivar'], \ sci_dict[self.det]['skymodel'], sci_dict[self.det]['objmodel'], \ sci_dict[self.det]['ivarmodel'], sci_dict[self.det]['outmask'], \ sci_dict[self.det]['specobjs'], \ = self.extract_one(frames, self.det, bg_frames, std_outfile=std_outfile) # JFH TODO write out the background frame? # Return return sci_dict def get_sci_metadata(self, frame, det): """ Grab the meta data for a given science frame and specific detector Args: frame (int): Frame index det (int): Detector index Returns: 5 objects are returned:: - str: Object type; science or standard - str: Setup string from master_key() - astropy.time.Time: Time of observation - str: Basename of the frame - str: Binning of the detector """ # Set binning, obstime, basename, and objtype binning = self.fitstbl['binning'][frame] obstime = self.fitstbl.construct_obstime(frame) basename = self.fitstbl.construct_basename(frame, obstime=obstime) objtype = self.fitstbl['frametype'][frame] if 'science' in objtype: objtype_out = 'science' elif 'standard' in objtype: objtype_out = 'standard' else: msgs.error('Unrecognized objtype') setup = self.fitstbl.master_key(frame, det=det) return objtype_out, setup, obstime, basename, binning def get_std_trace(self, std_redux, det, std_outfile): """ Returns the trace of the standard if it is applicable to the current reduction Args: std_redux (bool): If False, proceed det (int): Detector index std_outfile (str): Filename for the standard star spec1d file Returns: ndarray or None: Trace of the standard star on input detector """ if std_redux is False and std_outfile is not None: sobjs = specobjs.SpecObjs.from_fitsfile(std_outfile) # Does the detector match? # TODO Instrument specific logic here could be implemented with the parset. For example LRIS-B or LRIS-R we # we would use the standard from another detector this_det = sobjs.DET == det if np.any(this_det): sobjs_det = sobjs[this_det] sobjs_std = sobjs_det.get_std() # No standard extracted on this detector?? 
if sobjs_std is None: return None std_trace = sobjs_std.TRACE_SPAT # flatten the array if this multislit if 'MultiSlit' in self.spectrograph.pypeline: std_trace = std_trace.flatten() elif 'Echelle' in self.spectrograph.pypeline: std_trace = std_trace.T else: msgs.error('Unrecognized pypeline') else: std_trace = None else: std_trace = None return std_trace def extract_one(self, frames, det, bg_frames, std_outfile=None): """ Extract a single exposure/detector pair sci_ID and det need to have been set internally prior to calling this method Args: frames (list): List of frames to extract; stacked if more than one is provided det (int): bg_frames (list): List of frames to use as the background Can be empty std_outfile (str, optional): Returns: seven objects are returned:: - ndarray: Science image - ndarray: Science inverse variance image - ndarray: Model of the sky - ndarray: Model of the object - ndarray: Model of inverse variance - ndarray: Mask - :obj:`pypeit.specobjs.SpecObjs`: spectra """ # Grab some meta-data needed for the reduction from the fitstbl self.objtype, self.setup, self.obstime, self.basename, self.binning = self.get_sci_metadata( frames[0], det) # Is this a standard star? self.std_redux = 'standard' in self.objtype # Get the standard trace if need be std_trace = self.get_std_trace(self.std_redux, det, std_outfile) # Build Science image sci_files = self.fitstbl.frame_paths(frames) self.sciImg = scienceimage.build_from_file_list( self.spectrograph, det, self.par['scienceframe']['process'], self.caliBrate.msbpm, sci_files, self.caliBrate.msbias, self.caliBrate.mspixelflat, illum_flat=self.caliBrate.msillumflat) # Background Image? if len(bg_frames) > 0: bg_file_list = self.fitstbl.frame_paths(bg_frames) self.sciImg = self.sciImg - scienceimage.build_from_file_list( self.spectrograph, det, self.par['scienceframe']['process'], self.caliBrate.msbpm, bg_file_list, self.caliBrate.msbias, self.caliBrate.mspixelflat, illum_flat=self.caliBrate.msillumflat) # Update mask for slitmask slitmask = pixels.tslits2mask(self.caliBrate.tslits_dict) self.sciImg.update_mask_slitmask(slitmask) # For QA on crash msgs.sciexp = self.sciImg # Instantiate Reduce object self.maskslits = self.caliBrate.tslits_dict['maskslits'].copy() # Required for pypeline specific object # TODO -- caliBrate should be replaced by the ~3 primary Objects needed # once we have the data models in place. self.redux = reduce.instantiate_me(self.sciImg, self.spectrograph, self.par, self.caliBrate, maskslits=self.maskslits, ir_redux=self.ir_redux, std_redux=self.std_redux, objtype=self.objtype, setup=self.setup, show=self.show, det=det, binning=self.binning) # Show? if self.show: self.redux.show('image', image=self.sciImg.image, chname='processed', slits=True, clear=True) # Prep for manual extraction (if requested) manual_extract_dict = self.fitstbl.get_manual_extract(frames, det) self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs = self.redux.run( std_trace=std_trace, manual_extract_dict=manual_extract_dict, show_peaks=self.show, basename=self.basename, ra=self.fitstbl["ra"][frames[0]], dec=self.fitstbl["dec"][frames[0]], obstime=self.obstime) # Return return self.sciImg.image, self.sciImg.ivar, self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs # TODO: Why not use self.frame? def save_exposure(self, frame, sci_dict, basename): """ Save the outputs from extraction for a given exposure Args: frame (:obj:`int`): 0-indexed row in the metadata table with the frame that has been reduced. 
sci_dict (:obj:`dict`): Dictionary containing the primary outputs of extraction basename (:obj:`str`): The root name for the output file. Returns: None or SpecObjs: All of the objects saved to disk """ # TODO: Need some checks here that the exposure has been reduced # Determine the headers head1d = self.fitstbl[frame] # Need raw file header information rawfile = self.fitstbl.frame_paths(frame) head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext) refframe = 'pixel' if self.caliBrate.par['wavelengths']['reference'] == 'pixel' else \ self.caliBrate.par['wavelengths']['frame'] # Determine the paths/filenames save.save_all(sci_dict, self.caliBrate.master_key_dict, self.caliBrate.master_dir, self.spectrograph, head1d, head2d, self.science_path, basename, update_det=self.par['rdx']['detnum'], binning=self.fitstbl['binning'][frame]) def msgs_reset(self): """ Reset the msgs object """ # Reset the global logger msgs.reset(log=self.logname, verbosity=self.verbosity) msgs.pypeit_file = self.pypeit_file def print_end_time(self): """ Print the elapsed time """ # Capture the end time and print it to user tend = time.time() codetime = tend - self.tstart if codetime < 60.0: msgs.info('Execution time: {0:.2f}s'.format(codetime)) elif codetime / 60.0 < 60.0: mns = int(codetime / 60.0) scs = codetime - 60.0 * mns msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs)) else: hrs = int(codetime / 3600.0) mns = int(60.0 * (codetime / 3600.0 - hrs)) scs = codetime - 60.0 * mns - 3600.0 * hrs msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format( hrs, mns, scs)) # TODO: Move this to fitstbl? def show_science(self): """ Simple print of science frames """ indx = self.fitstbl.find_frames('science') print(self.fitstbl[['target', 'ra', 'dec', 'exptime', 'dispname']][indx]) def __repr__(self): # Generate sets string return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
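# ----------------------------------------------------------------------
# Illustrative examples (not executed) of the detector selection implemented
# by PypeIt.select_detectors() in the class above, assuming a 3-detector
# instrument:
#
#     PypeIt.select_detectors(ndet=3)              # -> [1, 2, 3]
#     PypeIt.select_detectors(detnum=2, ndet=3)    # -> [2]
#     PypeIt.select_detectors(detnum=[1, 3])       # -> [1, 3]
#
# i.e. detectors are 1-indexed, a single integer is wrapped in a list, and
# a user-supplied list is returned as-is.
# ----------------------------------------------------------------------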
class PypeIt(object): """ This class runs the primary calibration and extraction in PypeIt .. todo:: Fill in list of attributes! Args: pypeit_file (:obj:`str`): PypeIt filename. verbosity (:obj:`int`, optional): Verbosity level of system output. Can be: - 0: No output - 1: Minimal output (default) - 2: All output overwrite (:obj:`bool`, optional): Flag to overwrite any existing files/directories. reuse_masters (:obj:`bool`, optional): Reuse any pre-existing calibration files logname (:obj:`str`, optional): The name of an ascii log file with the details of the reduction. show: (:obj:`bool`, optional): Show reduction steps via plots (which will block further execution until clicked on) and outputs to ginga. Requires remote control ginga session via ``ginga --modules=RC &`` redux_path (:obj:`str`, optional): Over-ride reduction path in PypeIt file (e.g. Notebook usage) calib_only: (:obj:`bool`, optional): Only generate the calibration files that you can Attributes: pypeit_file (:obj:`str`): Name of the pypeit file to read. PypeIt files have a specific set of valid formats. A description can be found :ref:`pypeit_file`. fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info """ # __metaclass__ = ABCMeta def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None, show=False, redux_path=None, calib_only=False): # Load cfg_lines, data_files, frametype, usrdata, setups \ = parse_pypeit_file(pypeit_file, runtime=True) self.pypeit_file = pypeit_file self.calib_only = calib_only # Spectrograph cfg = ConfigObj(cfg_lines) spectrograph_name = cfg['rdx']['spectrograph'] self.spectrograph = load_spectrograph(spectrograph_name) msgs.info('Loaded spectrograph {0}'.format( self.spectrograph.spectrograph)) # -------------------------------------------------------------- # Get the full set of PypeIt parameters # - Grab a science or standard file for configuration specific parameters config_specific_file = None for idx, row in enumerate(usrdata): if ('science' in row['frametype']) or ('standard' in row['frametype']): config_specific_file = data_files[idx] # search for arcs, trace if no scistd was there if config_specific_file is None: for idx, row in enumerate(usrdata): if ('arc' in row['frametype']) or ('trace' in row['frametype']): config_specific_file = data_files[idx] if config_specific_file is not None: msgs.info( 'Setting configuration-specific parameters using {0}'.format( os.path.split(config_specific_file)[1])) spectrograph_cfg_lines = self.spectrograph.config_specific_par( config_specific_file).to_config() # - Build the full set, merging with any user-provided # parameters self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines) msgs.info('Built full PypeIt parameter set.') # Check the output paths are ready if redux_path is not None: self.par['rdx']['redux_path'] = redux_path # TODO: Write the full parameter set here? 
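# NOTE: the intended precedence of the merge above is spectrograph
# defaults < configuration-specific values (config_specific_par) <
# user-level settings in the PypeIt file, since cfg_lines is passed as
# merge_with and is merged on top of spectrograph_cfg_lines.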
# -------------------------------------------------------------- # -------------------------------------------------------------- # Build the meta data # - Re-initilize based on the file data msgs.info('Compiling metadata') self.fitstbl = PypeItMetaData(self.spectrograph, self.par, files=data_files, usrdata=usrdata, strict=True) # - Interpret automated or user-provided data from the PypeIt # file self.fitstbl.finalize_usr_build(frametype, setups[0]) # -------------------------------------------------------------- # - Write .calib file (For QA naming amongst other things) calib_file = pypeit_file.replace('.pypeit', '.calib') self.fitstbl.write_calib(calib_file) # Other Internals self.logname = logname self.overwrite = overwrite # Currently the runtime argument determines the behavior for # reuse_masters. self.reuse_masters = reuse_masters self.show = show # Set paths self.calibrations_path = os.path.join( self.par['rdx']['redux_path'], self.par['calibrations']['master_dir']) # Check for calibrations if not self.calib_only: calibrations.check_for_calibs( self.par, self.fitstbl, raise_error=self.par['calibrations']['raise_chk_error']) # Report paths msgs.info('Setting reduction path to {0}'.format( self.par['rdx']['redux_path'])) msgs.info('Master calibration data output to: {0}'.format( self.calibrations_path)) msgs.info('Science data output to: {0}'.format(self.science_path)) msgs.info('Quality assessment plots output to: {0}'.format( self.qa_path)) # TODO: Is anything written to the qa dir or only to qa/PNGs? # Should we have separate calibration and science QA # directories? # An html file wrapping them all too # # Instantiate Calibrations class # if self.spectrograph.pypeline in ['MultiSlit', 'Echelle']: # self.caliBrate \ # = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], # self.spectrograph, self.calibrations_path, # qadir=self.qa_path, # reuse_masters=self.reuse_masters, # show=self.show, # slitspat_num=self.par['rdx']['slitspatnum']) # elif self.spectrograph.pypeline in ['IFU']: # self.caliBrate \ # = calibrations.IFUCalibrations(self.fitstbl, self.par['calibrations'], # self.spectrograph, # self.calibrations_path, # qadir=self.qa_path, # reuse_masters=self.reuse_masters, # show=self.show) # else: # msgs.error("No calibration available to support pypeline: {0:s}".format(self.spectrograph.pypeline)) # Init self.verbosity = verbosity # TODO: I don't think this ever used self.det = None self.tstart = None self.basename = None self.sciI = None self.obstime = None @property def science_path(self): """Return the path to the science directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir']) @property def qa_path(self): """Return the path to the top-level QA directory.""" return os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['qadir']) def build_qa(self): """ Generate QA wrappers """ qa.gen_mf_html(self.pypeit_file, self.qa_path) qa.gen_exp_html() # TODO: This should go in a more relevant place def spec_output_file(self, frame, twod=False): """ Return the path to the spectral output data file. Args: frame (:obj:`int`): Frame index from :attr:`fitstbl`. twod (:obj:`bool`): Name for the 2D output file; 1D file otherwise. 
Returns: :obj:`str`: The path for the output file """ return os.path.join( self.science_path, 'spec{0}d_{1}.fits'.format('2' if twod else '1', self.fitstbl.construct_basename(frame))) def outfile_exists(self, frame): """ Check whether the 2D outfile of a given frame already exists Args: frame (int): Frame index from fitstbl Returns: bool: True if the 2d file exists, False if it does not exist """ return os.path.isfile(self.spec_output_file(frame, twod=True)) def get_std_outfile(self, standard_frames): """ Grab the output filename from an input list of standard_frame indices. If more than one index is provided, the first is taken Args: standard_frames (list): List of indices corresponding to standard stars Returns: str: Full path to the standard spec1d output file """ # TODO: Need to decide how to associate standards with # science frames in the case where there is more than one # standard associated with a given science frame. Below, I # just use the first standard std_outfile = None std_frame = None if len(standard_frames) == 0 else standard_frames[0] # Prepare to load up standard? if std_frame is not None: std_outfile = self.spec_output_file(std_frame) \ if isinstance(std_frame, (int,np.integer)) else None if std_outfile is not None and not os.path.isfile(std_outfile): msgs.error('Could not find standard file: {0}'.format(std_outfile)) return std_outfile def calib_all(self): """ Create calibrations for all setups. This will not crash if not all of the standard set of files are provided """ self.tstart = time.time() # Frame indices frame_indx = np.arange(len(self.fitstbl)) for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) grp_frames = frame_indx[in_grp] # Find the detectors to reduce detectors = PypeIt.select_detectors( detnum=self.par['rdx']['detnum'], ndet=self.spectrograph.ndet) # Loop on Detectors for self.det in detectors: # Instantiate Calibrations class self.caliBrate = calibrations.Calibrations.get_instance( self.fitstbl, self.par['calibrations'], self.spectrograph, self.calibrations_path, qadir=self.qa_path, reuse_masters=self.reuse_masters, show=self.show, slitspat_num=self.par['rdx']['slitspatnum']) # Do it self.caliBrate.set_config(grp_frames[0], self.det, self.par['calibrations']) self.caliBrate.run_the_steps() # Finish self.print_end_time() def reduce_all(self): """ Main driver of the entire reduction. Calibration and extraction via a series of calls to reduce_exposure() """ # Validate the parameter set self.par.validate_keys(required=[ 'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure' ]) self.tstart = time.time() # Find the standard frames is_standard = self.fitstbl.find_frames('standard') # Find the science frames is_science = self.fitstbl.find_frames('science') # Frame indices frame_indx = np.arange(len(self.fitstbl)) # Iterate over each calibration group and reduce the standards for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the standard frames in this calibration group: grp_standards = frame_indx[is_standard & in_grp] # Reduce all the standard frames, loop on unique comb_id u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards]) for j, comb_id in enumerate(u_combid_std): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: std_spec2d,
std_sobjs = self.reduce_exposure( frames, bg_frames=bg_frames) # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], std_spec2d, std_sobjs, self.basename) else: msgs.info( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') # Iterate over each calibration group again and reduce the science frames for i in range(self.fitstbl.n_calib_groups): # Find all the frames in this calibration group in_grp = self.fitstbl.find_calib_group(i) # Find the indices of the science frames in this calibration group: grp_science = frame_indx[is_science & in_grp] # Associate standards (previously reduced above) for this setup std_outfile = self.get_std_outfile(frame_indx[is_standard]) # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration science_basename = [None] * len(grp_science) # Loop on unique comb_id u_combid = np.unique(self.fitstbl['comb_id'][grp_science]) for j, comb_id in enumerate(u_combid): frames = np.where(self.fitstbl['comb_id'] == comb_id)[0] # Find all frames whose comb_id matches the current frames bkg_id. bg_frames = np.where((self.fitstbl['comb_id'] == self.fitstbl['bkg_id'][frames][0]) & (self.fitstbl['comb_id'] >= 0))[0] # JFH changed the syntax below to that above, which allows frames to be used more than once # as a background image. The syntax below would require that we could somehow list multiple # numbers for the bkg_id which is impossible without a comma separated list # bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0] if not self.outfile_exists(frames[0]) or self.overwrite: # TODO -- Should we reset/regenerate self.slits.mask for a new exposure sci_spec2d, sci_sobjs = self.reduce_exposure( frames, bg_frames=bg_frames, std_outfile=std_outfile) science_basename[j] = self.basename # TODO come up with sensible naming convention for save_exposure for combined files self.save_exposure(frames[0], sci_spec2d, sci_sobjs, self.basename) else: msgs.warn( 'Output file: {:s} already exists'.format( self.fitstbl.construct_basename(frames[0])) + '. Set overwrite=True to recreate and overwrite.') msgs.info('Finished calibration group {0}'.format(i)) # Check if this is an IFU reduction. If so, make a datacube if self.spectrograph.pypeline == "IFU" and self.par['reduce']['cube'][ 'make_cube']: msgs.work("Generate datacube") # Finish self.print_end_time() # This is a static method to allow for use in coadding script @staticmethod def select_detectors(detnum=None, ndet=1, slitspatnum=None): """ Return the 1-indexed list of detectors to reduce. Args: detnum (:obj:`int`, :obj:`list`, optional): One or more detectors to reduce. If None, return the full list for the provided number of detectors (`ndet`). ndet (:obj:`int`, optional): The number of detectors for this instrument. Only used if `detnum is None`. Returns: list: List of detectors to be reduced """ if detnum is not None and slitspatnum is not None: msgs.error( "You cannot specify both detnum and slitspatnum. Too painful for over-writing SpecObjs" ) if detnum is None and slitspatnum is None: return np.arange(1, ndet + 1).tolist() elif detnum is not None: return np.atleast_1d(detnum).tolist() else: return slittrace.parse_slitspatnum(slitspatnum)[0].tolist() def reduce_exposure(self, frames, bg_frames=None, std_outfile=None): """ Reduce a single exposure Args: frame (:obj:`int`): 0-indexed row in :attr:`fitstbl` with the frame to reduce. 
bg_frames (:obj:`list`, optional): List of frame indices for the background. std_outfile (:obj:`str`, optional): File with a previously reduced standard spectrum from PypeIt. Returns: dict: The dictionary containing the primary outputs of extraction. """ # TODO: # - change doc string to reflect that more than one frame can be # provided # if show is set, clear the ginga channels at the start of each new sci_ID if self.show: # TODO: Put this in a try/except block? display.clear_all() has_bg = True if bg_frames is not None and len( bg_frames) > 0 else False # Is this an IR reduction? # TODO: Why specific to IR? self.ir_redux = True if has_bg else False # Container for all the Spec2DObj all_spec2d = spec2dobj.AllSpec2DObj() all_spec2d['meta']['ir_redux'] = self.ir_redux # TODO -- Should we reset/regenerate self.slits.mask for a new exposure all_specobjs = specobjs.SpecObjs() # Print status message msgs_string = 'Reducing target {:s}'.format( self.fitstbl['target'][frames[0]]) + msgs.newline() # TODO: Print these when the frames are actually combined, # backgrounds are used, etc? msgs_string += 'Combining frames:' + msgs.newline() for iframe in frames: msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() msgs.info(msgs_string) if has_bg: bg_msgs_string = '' for iframe in bg_frames: bg_msgs_string += '{0:s}'.format( self.fitstbl['filename'][iframe]) + msgs.newline() bg_msgs_string = msgs.newline( ) + 'Using background from frames:' + msgs.newline( ) + bg_msgs_string msgs.info(bg_msgs_string) # Find the detectors to reduce detectors = PypeIt.select_detectors( detnum=self.par['rdx']['detnum'], slitspatnum=self.par['rdx']['slitspatnum'], ndet=self.spectrograph.ndet) if len(detectors) != self.spectrograph.ndet: msgs.warn('Not reducing detectors: {0}'.format(' '.join([ str(d) for d in set(np.arange(self.spectrograph.ndet)) - set(detectors) ]))) # Loop on Detectors # TODO: Attempt to put in a multiprocessing call here? for self.det in detectors: msgs.info("Working on detector {0}".format(self.det)) # Instantiate Calibrations class self.caliBrate = calibrations.Calibrations.get_instance( self.fitstbl, self.par['calibrations'], self.spectrograph, self.calibrations_path, qadir=self.qa_path, reuse_masters=self.reuse_masters, show=self.show, slitspat_num=self.par['rdx']['slitspatnum']) # These need to be separate to accomodate COADD2D self.caliBrate.set_config(frames[0], self.det, self.par['calibrations']) self.caliBrate.run_the_steps() # Extract # TODO: pass back the background frame, pass in background # files as an argument. extract one takes a file list as an # argument and instantiates science within all_spec2d[self.det], tmp_sobjs \ = self.reduce_one(frames, self.det, bg_frames, std_outfile=std_outfile) # Hold em if tmp_sobjs.nobj > 0: all_specobjs.add_sobj(tmp_sobjs) # JFH TODO write out the background frame? # TODO -- Save here? Seems like we should. 
Would probably need to use update_det=True # Return return all_spec2d, all_specobjs def get_sci_metadata(self, frame, det): """ Grab the meta data for a given science frame and specific detector Args: frame (int): Frame index det (int): Detector index Returns: 5 objects are returned:: - str: Object type; science or standard - str: Setup string from master_key() - astropy.time.Time: Time of observation - str: Basename of the frame - str: Binning of the detector """ # Set binning, obstime, basename, and objtype binning = self.fitstbl['binning'][frame] obstime = self.fitstbl.construct_obstime(frame) basename = self.fitstbl.construct_basename(frame, obstime=obstime) objtype = self.fitstbl['frametype'][frame] if 'science' in objtype: objtype_out = 'science' elif 'standard' in objtype: objtype_out = 'standard' else: msgs.error('Unrecognized objtype') setup = self.fitstbl.master_key(frame, det=det) return objtype_out, setup, obstime, basename, binning def get_std_trace(self, std_redux, det, std_outfile): """ Returns the trace of the standard if it is applicable to the current reduction Args: std_redux (bool): If False, proceed det (int): Detector index std_outfile (str): Filename for the standard star spec1d file Returns: ndarray or None: Trace of the standard star on input detector """ if std_redux is False and std_outfile is not None: sobjs = specobjs.SpecObjs.from_fitsfile(std_outfile) # Does the detector match? # TODO Instrument specific logic here could be implemented with the parset. For example LRIS-B or LRIS-R we # we would use the standard from another detector this_det = sobjs.DET == det if np.any(this_det): sobjs_det = sobjs[this_det] sobjs_std = sobjs_det.get_std() # No standard extracted on this detector?? if sobjs_std is None: return None std_trace = sobjs_std.TRACE_SPAT # flatten the array if this multislit if 'MultiSlit' in self.spectrograph.pypeline: std_trace = std_trace.flatten() elif 'Echelle' in self.spectrograph.pypeline: std_trace = std_trace.T else: msgs.error('Unrecognized pypeline') else: std_trace = None else: std_trace = None return std_trace def reduce_one(self, frames, det, bg_frames, std_outfile=None): """ Reduce + Extract a single exposure/detector pair sci_ID and det need to have been set internally prior to calling this method Args: frames (:obj:`list`): List of frames to extract; stacked if more than one is provided det (:obj:`int`): Detector number (1-indexed) bg_frames (:obj:`list`): List of frames to use as the background. Can be empty. std_outfile (:obj:`str`, optional): Filename for the standard star spec1d file. Passed directly to :func:`get_std_trace`. Returns: tuple: Returns six `numpy.ndarray`_ objects and a :class:`pypeit.specobjs.SpecObjs` object with the extracted spectra from this exposure/detector pair. The six `numpy.ndarray`_ objects are (1) the science image, (2) its inverse variance, (3) the sky model, (4) the object model, (5) the model inverse variance, and (6) the mask. """ # Grab some meta-data needed for the reduction from the fitstbl self.objtype, self.setup, self.obstime, self.basename, self.binning \ = self.get_sci_metadata(frames[0], det) msgs.info("Extraction begins for {} on det={}".format( self.basename, det)) # Is this a standard star? 
self.std_redux = 'standard' in self.objtype if self.std_redux: frame_par = self.par['calibrations']['standardframe'] else: frame_par = self.par['scienceframe'] # Get the standard trace if need be std_trace = self.get_std_trace(self.std_redux, det, std_outfile) # Build Science image sci_files = self.fitstbl.frame_paths(frames) sciImg = buildimage.buildimage_fromlist( self.spectrograph, det, frame_par, sci_files, bias=self.caliBrate.msbias, bpm=self.caliBrate.msbpm, dark=self.caliBrate.msdark, flatimages=self.caliBrate.flatimages, slits=self.caliBrate.slits, # For flexure correction ignore_saturation=False) # Background Image? if len(bg_frames) > 0: bg_file_list = self.fitstbl.frame_paths(bg_frames) sciImg = sciImg.sub( buildimage.buildimage_fromlist( self.spectrograph, det, frame_par, bg_file_list, bpm=self.caliBrate.msbpm, bias=self.caliBrate.msbias, dark=self.caliBrate.msdark, flatimages=self.caliBrate.flatimages, slits=self.caliBrate.slits, # For flexure correction ignore_saturation=False), frame_par['process']) # Instantiate Reduce object # Required for pypeline specific object # At instantiaton, the fullmask in self.sciImg is modified self.redux = reduce.Reduce.get_instance(sciImg, self.spectrograph, self.par, self.caliBrate, self.objtype, ir_redux=self.ir_redux, std_redux=self.std_redux, setup=self.setup, show=self.show, det=det, binning=self.binning, std_outfile=std_outfile, basename=self.basename) # Show? if self.show: self.redux.show('image', image=sciImg.image, chname='processed', slits=True, clear=True) # Do it skymodel, objmodel, ivarmodel, outmask, sobjs, scaleImg, waveImg, tilts = self.redux.run( std_trace=std_trace, show_peaks=self.show, ra=self.fitstbl["ra"][frames[0]], dec=self.fitstbl["dec"][frames[0]], obstime=self.obstime) # TODO -- Save the slits yet again? # TODO -- Do this upstream # Tack on detector for sobj in sobjs: sobj.DETECTOR = sciImg.detector # Construct the Spec2DObj spec2DObj = spec2dobj.Spec2DObj(det=self.det, sciimg=sciImg.image, ivarraw=sciImg.ivar, skymodel=skymodel, objmodel=objmodel, ivarmodel=ivarmodel, scaleimg=scaleImg, waveimg=waveImg, bpmmask=outmask, detector=sciImg.detector, sci_spat_flexure=sciImg.spat_flexure, tilts=tilts, slits=copy.deepcopy( self.caliBrate.slits)) spec2DObj.process_steps = sciImg.process_steps # Return return spec2DObj, sobjs def save_exposure(self, frame, all_spec2d, all_specobjs, basename): """ Save the outputs from extraction for a given exposure Args: frame (:obj:`int`): 0-indexed row in the metadata table with the frame that has been reduced. all_spec2d(:class:`pypeit.spec2dobj.AllSpec2DObj`): sci_dict (:obj:`dict`): Dictionary containing the primary outputs of extraction basename (:obj:`str`): The root name for the output file. Returns: None or SpecObjs: All of the objects saved to disk """ # TODO: Need some checks here that the exposure has been reduced? 
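# The blocks below write up to three files to self.science_path:
# spec1d_{basename}.fits (extracted spectra, only if all_specobjs.nobj > 0),
# spec1d_{basename}.txt (object summary, same condition), and
# spec2d_{basename}.fits (the 2D models).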
# Determine the headers row_fitstbl = self.fitstbl[frame] # Need raw file header information rawfile = self.fitstbl.frame_paths(frame) head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext) # Check for the directory if not os.path.isdir(self.science_path): os.makedirs(self.science_path) subheader = self.spectrograph.subheader_for_spec(row_fitstbl, head2d) # 1D spectra if all_specobjs.nobj > 0: # Spectra outfile1d = os.path.join(self.science_path, 'spec1d_{:s}.fits'.format(basename)) all_specobjs.write_to_fits( subheader, outfile1d, update_det=self.par['rdx']['detnum'], slitspatnum=self.par['rdx']['slitspatnum']) # Info outfiletxt = os.path.join(self.science_path, 'spec1d_{:s}.txt'.format(basename)) all_specobjs.write_info(outfiletxt, self.spectrograph.pypeline) # 2D spectra outfile2d = os.path.join(self.science_path, 'spec2d_{:s}.fits'.format(basename)) # Build header pri_hdr = all_spec2d.build_primary_hdr( head2d, self.spectrograph, redux_path=self.par['rdx']['redux_path'], master_key_dict=self.caliBrate.master_key_dict, master_dir=self.caliBrate.master_dir, subheader=subheader) # Write all_spec2d.write_to_fits(outfile2d, pri_hdr=pri_hdr, update_det=self.par['rdx']['detnum']) def msgs_reset(self): """ Reset the msgs object """ # Reset the global logger msgs.reset(log=self.logname, verbosity=self.verbosity) msgs.pypeit_file = self.pypeit_file def print_end_time(self): """ Print the elapsed time """ # Capture the end time and print it to user tend = time.time() codetime = tend - self.tstart if codetime < 60.0: msgs.info('Execution time: {0:.2f}s'.format(codetime)) elif codetime / 60.0 < 60.0: mns = int(codetime / 60.0) scs = codetime - 60.0 * mns msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs)) else: hrs = int(codetime / 3600.0) mns = int(60.0 * (codetime / 3600.0 - hrs)) scs = codetime - 60.0 * mns - 3600.0 * hrs msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format( hrs, mns, scs)) # TODO: Move this to fitstbl? def show_science(self): """ Simple print of science frames """ indx = self.fitstbl.find_frames('science') print(self.fitstbl[['target', 'ra', 'dec', 'exptime', 'dispname']][indx]) def __repr__(self): # Generate sets string return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
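# ----------------------------------------------------------------------
# Minimal driver sketch (illustrative only).  The PypeIt file name, log
# name, and keyword values below are placeholders, not defaults defined by
# this module; a real run would point at an existing .pypeit file.
if __name__ == '__main__':
    pypeIt = PypeIt('example.pypeit', verbosity=2, overwrite=True,
                    reuse_masters=True, logname='example.log', show=False,
                    calib_only=False)
    if pypeIt.calib_only:
        # Only build the calibration frames
        pypeIt.calib_all()
    else:
        # Full calibration + extraction of all science and standard frames
        pypeIt.reduce_all()
    # Wrap the QA plots in html
    pypeIt.build_qa()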