Code example #1
File: pypeit.py  Project: catherinemanea/PypeIt
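Neither excerpt below shows its module-level imports. The following is a minimal sketch of what the code appears to rely on; the pypeit module paths are inferred from usage and are assumptions that may differ between PypeIt versions.

# Imports assumed by the two excerpts below (inferred from usage; the exact
# pypeit module paths are assumptions and may vary by version).
import os
import time
import datetime
import inspect
from collections import OrderedDict

import numpy as np
from astropy.io import fits
from astropy.table import Table
from configobj import ConfigObj
from linetools import utils as ltu

from pypeit import msgs
from pypeit import calibrations
from pypeit.par import PypeItPar
from pypeit.par.util import parse_pypeit_file, make_pypeit_file
from pypeit.metadata import PypeItMetaData
from pypeit.spectrographs.util import load_spectrograph
# ...plus the objects referenced below as qa, wave, save, load, paths, ginga,
# reduce, and scienceimage, whose import locations are not shown here.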
class PypeIt(object):
    """
    This class runs the primary calibration and extraction in PypeIt

    Args:
        pypeit_file (:obj:`str`):  PypeIt filename
        verbosity (:obj:`int`, optional):
            Verbosity level of system output.  Can be::
                - 0: No output
                - 1: Minimal output
                - 2: All output (default)
        overwrite (:obj:`bool`, optional):
            Flag to overwrite any existing files/directories.
        reuse_masters (:obj:`bool`, optional):
            Reuse any pre-existing calibration files.
        logname (:obj:`str`, optional):
            The name of an ascii log file with the details of the
            reduction.
        redux_path (:obj:`str`, optional):
            Over-ride reduction path in PypeIt file (e.g. Notebook usage)
        show (:obj:`bool`, optional):
            Show reduction steps via plots (which will block further
            execution until clicked on) and outputs to ginga. Requires
            remote control ginga session via "ginga --modules=RC &"

    Attributes:
        pypeit_file (:obj:`str`):
            Name of the pypeit file to read.  PypeIt files have a specific
            set of valid formats. A description can be found `here`_
            (include doc link).
        fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info
    """
#    __metaclass__ = ABCMeta

    def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None,
                 show=False, redux_path=None):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)

        # Par
        # Defaults
        spectrograph_def_par = self.spectrograph.default_pypeit_par()
        # Grab a science file for configuration specific parameters
        sci_file = None
        for idx, row in enumerate(usrdata):
            if 'science' in row['frametype']:
                sci_file = data_files[idx]
                break

        # Set
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(spectrograph_def_par, sci_file).to_config()
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

        # Fitstbl
        self.fitstbl = PypeItMetaData(self.spectrograph, self.par, file_list=data_files,
                                      usrdata=usrdata, strict=True)

        # The following could be put in a prepare_to_run() method in PypeItMetaData
        if 'setup' not in self.fitstbl.keys():
            self.fitstbl['setup'] = setups[0]
        self.fitstbl.get_frame_types(user=frametype)  # This sets them using the user inputs
        self.fitstbl.set_defaults()  # Only does something if values not set in PypeIt file
        self.fitstbl._set_calib_group_bits()
        self.fitstbl._check_calib_groups()
        # Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite
        # Currently the runtime argument determines the behavior for reuse_masters. There is also a reuse_masters
        # parameter in the parset but it is currently ignored.
        self.reuse_masters = reuse_masters
        self.show = show

        # Make the output directories
        self.par['rdx']['redux_path'] = os.getcwd() if redux_path is None else redux_path
        msgs.info("Setting reduction path to {:s}".format(self.par['rdx']['redux_path']))
        paths.make_dirs(self.spectrograph.spectrograph, self.par['calibrations']['caldir'],
                        self.par['rdx']['scidir'], self.par['rdx']['qadir'],
                        overwrite=self.overwrite, redux_path=self.par['rdx']['redux_path'])

        # Instantiate Calibrations class
        self.caliBrate \
            = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], self.spectrograph,
                                                 redux_path=self.par['rdx']['redux_path'],
                                                 reuse_masters=self.reuse_masters,
                                                 save_masters=True, write_qa=True,
                                                 show=self.show)
        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.frame = None
        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None

    def build_qa(self):
        """
        Generate QA wrappers
        """
        qa.gen_mf_html(self.pypeit_file)
        qa.gen_exp_html()

    def outfile_exists(self, frame):
        """
        Check whether the 2D outfile of a given frame already exists

        Args:
            frame (int): Frame index from fitstbl

        Returns:
            bool: True if the 2d file exists
                 False if it does not exist
        """
        # Check if the 2d output file exists
        scidir = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'])
        basename = self.fitstbl.construct_basename(frame)
        outfile = os.path.join(scidir, 'spec2d_{:s}.fits'.format(basename))
        return os.path.isfile(outfile)

    def get_std_outfile(self, standard_frames):
        """
        Grab the output filename from an input list of standard_frame indices

        If more than one index is provided, the first is taken

        Args:
            standard_frames (list): List of indices corresponding to standard stars

        Returns:
            str: Full path to the standard spec1d output file

        """
        # TODO: Need to decide how to associate standards with
        # science frames in the case where there is more than one
        # standard associated with a given science frame.  Below, I
        # just use the first standard

        std_outfile = None
        std_frame = None if len(standard_frames) == 0 else standard_frames[0]
        # Prepare to load up standard?
        if std_frame is not None and isinstance(std_frame, (int, np.integer)):
            std_outfile = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'],
                                       'spec1d_{:s}.fits'.format(self.fitstbl.construct_basename(std_frame)))

        if std_outfile is not None and not os.path.isfile(std_outfile):
            msgs.error('Could not find standard file: {0}'.format(std_outfile))

        return std_outfile

    def reduce_all(self):
        """
        Main driver of the entire reduction

        Calibration and extraction via a series of calls to reduce_exposure()

        """
        # Validate the parameter set
        required = ['rdx', 'calibrations', 'scienceframe', 'scienceimage', 'flexure', 'fluxcalib']
        can_be_None = ['flexure', 'fluxcalib']
        self.par.validate_keys(required=required, can_be_None=can_be_None)

        self.tstart = time.time()

        # Find the standard frames
        is_standard = self.fitstbl.find_frames('standard')

        # Find the science frames
        is_science = self.fitstbl.find_frames('science')

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))

        # Iterate over each calibration group and reduce the standards
        for i in range(self.fitstbl.n_calib_groups):

            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the standard frames in this calibration group:
            grp_standards = frame_indx[is_standard & in_grp]

            # Reduce all the standard frames, loop on unique comb_id
            u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards])
            for j, comb_id in enumerate(u_combid_std):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    std_dict = self.reduce_exposure(frames, bg_frames=bg_frames)
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], std_dict, self.basename)
                else:
                    msgs.info('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) +
                              '. Set overwrite=True to recreate and overwrite.')

        # Iterate over each calibration group again and reduce the science frames
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the science frames in this calibration group:
            grp_science = frame_indx[is_science & in_grp]
            # Associate standards (previously reduced above) for this setup
            std_outfile = self.get_std_outfile(frame_indx[is_standard])
            # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration
            science_basename = [None]*len(grp_science)
            # Loop on unique comb_id
            u_combid = np.unique(self.fitstbl['comb_id'][grp_science])
            for j, comb_id in enumerate(u_combid):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    sci_dict = self.reduce_exposure(frames, bg_frames=bg_frames, std_outfile=std_outfile)
                    science_basename[j] = self.basename
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], sci_dict, self.basename)
                else:
                    msgs.warn('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) +
                              '. Set overwrite=True to recreate and overwrite.')

            msgs.info('Finished calibration group {0}'.format(i))

        # Finish
        self.print_end_time()


    def select_detectors(self):
        """
        Return the 1-indexed list of detectors to reduce.

        Returns:
            list:  List of detectors to be reduced

        """
        if self.par['rdx']['detnum'] is None:
            return np.arange(self.spectrograph.ndet)+1
        return [self.par['rdx']['detnum']] if isinstance(self.par['rdx']['detnum'], int) \
                    else self.par['rdx']['detnum']

    def reduce_exposure(self, frames, bg_frames=[], std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the
                frames to reduce (stacked if more than one is provided)
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background
            std_outfile (:obj:`str`, optional):
                the name of a file with a previously PypeIt-reduced standard spectrum.

        Returns:
            dict: The dictionary containing the primary outputs of extraction
        """
        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            ginga.clear_all()

        # Save the frame
        self.frames = frames
        self.bg_frames = bg_frames
        # Is this an IR reduction?
        self.ir_redux = len(bg_frames) > 0

        # JFH Why does this need to be ordered?
        sci_dict = OrderedDict()  # This needs to be ordered
        sci_dict['meta'] = {}
        sci_dict['meta']['vel_corr'] = 0.
        sci_dict['meta']['ir_redux'] = self.ir_redux

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(self.fitstbl['target'][self.frames[0]]) + msgs.newline()
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in self.frames:
            msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if len(bg_frames) > 0:
            bg_msgs_string = ''
            for iframe in self.bg_frames:
                bg_msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline() + 'Using background from frames:' + msgs.newline() + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        detectors = self.select_detectors()
        if len(detectors) != self.spectrograph.ndet:
            msgs.warn('Not reducing detectors: {0}'.format(
                ' '.join([str(d) for d in set(np.arange(self.spectrograph.ndet) + 1) - set(detectors)])))

        # Loop on Detectors
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            sci_dict[self.det] = {}
            # Calibrate
            # TODO: Is it the right behavior to just use the first frame?
            self.caliBrate.set_config(self.frames[0], self.det, self.par['calibrations'])
            self.caliBrate.run_the_steps()
            # Extract
            # TODO: pass back the background frame, pass in background
            # files as an argument. extract_one takes a file list as an
            # argument and instantiates the ScienceImage within
            sci_dict[self.det]['sciimg'], sci_dict[self.det]['sciivar'], sci_dict[self.det]['skymodel'], \
                sci_dict[self.det]['objmodel'], sci_dict[self.det]['ivarmodel'], sci_dict[self.det]['outmask'], \
                sci_dict[self.det]['specobjs'], vel_corr \
                    = self.extract_one(self.frames, self.det, bg_frames=self.bg_frames, std_outfile=std_outfile)
            if vel_corr is not None:
                sci_dict['meta']['vel_corr'] = vel_corr

            # JFH TODO write out the background frame?

        # Return
        return sci_dict

    def flexure_correct(self, sobjs, maskslits):
        """
        Correct for flexure

        Spectra are modified in place (wavelengths are shifted)

        Args:
            sobjs (SpecObjs): Spectra to correct
            maskslits (ndarray): Slits that are masked

        """

        if self.par['flexure']['method'] != 'skip':
            flex_list = wave.flexure_obj(sobjs, maskslits, self.par['flexure']['method'],
                                         self.par['flexure']['spectrum'],
                                         mxshft=self.par['flexure']['maxshift'])
            # QA
            wave.flexure_qa(sobjs, maskslits, self.basename, self.det, flex_list,
                            out_dir=self.par['rdx']['redux_path'])
        else:
            msgs.info('Skipping flexure correction.')

    def helio_correct(self, sobjs, maskslits, frame, obstime):
        """
        Perform a heliocentric correction on a set of spectra

        Args:
            sobjs (pypeit.specobjs.SpecObjs): Spectra
            maskslits (ndarray): Slits that are masked
            frame (int): Frame to use for meta info
            obstime (astropy.time.Time): Time of the observation

        Returns:
            astropy.units.Quantity: Velocity correction in km/s

        """
        # Helio, correct Earth's motion
        if (self.caliBrate.par['wavelengths']['frame'] in ['heliocentric', 'barycentric']) \
                and (self.caliBrate.par['wavelengths']['reference'] != 'pixel'):
            # TODO change this keyword to refframe instead of frame
            msgs.info("Performing a {0} correction".format(self.caliBrate.par['wavelengths']['frame']))
            vel, vel_corr = wave.geomotion_correct(sobjs, maskslits, self.fitstbl, frame, obstime,
                                                   self.spectrograph.telescope['longitude'],
                                                   self.spectrograph.telescope['latitude'],
                                                   self.spectrograph.telescope['elevation'],
                                                   self.caliBrate.par['wavelengths']['frame'])
        else:
            msgs.info('A wavelength reference-frame correction will not be performed.')
            vel_corr = None

        return vel_corr

    def get_sci_metadata(self, frame, det):
        """
        Grab the meta data for a given science frame and specific detector

        Args:
            frame (int): Frame index
            det (int): Detector index

        Returns:
            5 objects are returned::
                - str: Object type;  science or standard
                - str: Setup string from master_key()
                - astropy.time.Time: Time of observation
                - str: Basename of the frame
                - str: Binning of the detector

        """

        # Set binning, obstime, basename, and objtype
        binning = self.fitstbl['binning'][frame]
        obstime  = self.fitstbl.construct_obstime(frame)
        basename = self.fitstbl.construct_basename(frame, obstime=obstime)
        objtype  = self.fitstbl['frametype'][frame]
        if 'science' in objtype:
            objtype_out = 'science'
        elif 'standard' in objtype:
            objtype_out = 'standard'
        else:
            msgs.error('Unrecognized objtype')
        setup = self.fitstbl.master_key(frame, det=det)
        return objtype_out, setup, obstime, basename, binning

    def get_std_trace(self, std_redux, det, std_outfile):
        """
        Returns the trace of the standard if it is applicable to the current reduction

        Args:
            std_redux (bool): If True, the current reduction is of a standard
                star itself, so no standard trace is returned
            det (int): Detector index
            std_outfile (str): Filename for the standard star spec1d file

        Returns:
            ndarray: Trace of the standard star on input detector

        """
        if std_redux is False and std_outfile is not None:
            sobjs, hdr_std = load.load_specobjs(std_outfile)
            # Does the detector match?
            # TODO Instrument specific logic here could be implemented with the parset. For example,
            # for LRIS-B or LRIS-R we would use the standard from another detector
            this_det = sobjs.det == det
            if np.any(this_det):
                sobjs_det = sobjs[this_det]
                sobjs_std = sobjs_det.get_std()
                std_trace = sobjs_std.trace_spat
                # flatten the array if this is multislit
                if 'MultiSlit' in self.spectrograph.pypeline:
                    std_trace = std_trace.flatten()
                elif 'Echelle' in self.spectrograph.pypeline:
                    std_trace = std_trace.T
                else:
                    msgs.error('Unrecognized pypeline')
            else:
                std_trace = None
        else:
            std_trace = None

        return std_trace

    def extract_one(self, frames, det, bg_frames=[], std_outfile=None):
        """
        Extract a single exposure/detector pair

        sci_ID and det need to have been set internally prior to calling this method

        Args:
            frames (list):  List of frames to extract; stacked if more than one is provided
            det (int): Detector number (1-indexed)
            bg_frames (list, optional): List of frames to use as the background
            std_outfile (str, optional): Filename of a previously reduced standard star spec1d file

        Returns:
            eight objects are returned::
                - ndarray: Science image
                - ndarray: Science inverse variance image
                - ndarray: Model of the sky
                - ndarray: Model of the object
                - ndarray: Model of inverse variance
                - ndarray: Mask
                - :obj:`pypeit.specobjs.SpecObjs`: spectra
                - astropy.units.Quantity: velocity correction

        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning = self.get_sci_metadata(frames[0], det)
        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype
        # Get the standard trace if need be
        std_trace = self.get_std_trace(self.std_redux, det, std_outfile)
        # Instantiate ScienceImage for the files we will reduce
        sci_files = self.fitstbl.frame_paths(frames)
        self.sciI = scienceimage.ScienceImage(self.spectrograph, sci_files,
                                              bg_file_list=self.fitstbl.frame_paths(bg_frames),
                                              ir_redux=self.ir_redux,
                                              par=self.par['scienceframe'],
                                              det=det,
                                              binning=self.binning)
        # For QA on crash.
        msgs.sciexp = self.sciI

        # Process images (includes inverse variance image, rn2 image, and CR mask)
        self.sciimg, self.sciivar, self.rn2img, self.mask, self.crmask = \
            self.sciI.proc(self.caliBrate.msbias, self.caliBrate.mspixflatnrm.copy(),
                           self.caliBrate.msbpm, illum_flat=self.caliBrate.msillumflat,
                           show=self.show)
        # Object finding, first pass on frame without sky subtraction
        self.maskslits = self.caliBrate.maskslits.copy()

        self.redux = reduce.instantiate_me(self.spectrograph, self.caliBrate.tslits_dict,
                                           self.mask, self.par,
                                           ir_redux = self.ir_redux,
                                           objtype=self.objtype, setup=self.setup,
                                           det=det, binning=self.binning)

        # Prep for manual extraction (if requested)
        manual_extract_dict = self.fitstbl.get_manual_extract(frames, det)

        # Do one iteration of object finding, and sky subtract to get initial sky model
        self.sobjs_obj, self.nobj, skymask_init = \
            self.redux.find_objects(self.sciimg, self.sciivar, std=self.std_redux, ir_redux=self.ir_redux,
                                    std_trace=std_trace, maskslits=self.maskslits,
                                    show=self.show & (not self.std_redux),
                                    manual_extract_dict=manual_extract_dict)

        # Global sky subtraction, first pass. Uses skymask from object finding step above
        self.initial_sky = \
            self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], skymask=skymask_init,
                                    std=self.std_redux, maskslits=self.maskslits, show=self.show)

        if not self.std_redux:
            # Object finding, second pass on frame *with* sky subtraction. Show here if requested
            self.sobjs_obj, self.nobj, self.skymask = \
                self.redux.find_objects(self.sciimg - self.initial_sky, self.sciivar, std=self.std_redux,
                                        ir_redux=self.ir_redux, std_trace=std_trace,
                                        maskslits=self.maskslits, show=self.show,
                                        manual_extract_dict=manual_extract_dict)

        # If there are objects, do 2nd round of global_skysub, local_skysub_extract, flexure, geo_motion
        if self.nobj > 0:
            # Global sky subtraction second pass. Uses skymask from object finding
            self.global_sky = self.initial_sky if self.std_redux else \
                self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'],
                skymask=self.skymask, maskslits=self.maskslits, show=self.show)

            self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs = \
                self.redux.local_skysub_extract(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'],
                                                self.caliBrate.mswave, self.global_sky, self.rn2img,
                                                self.sobjs_obj, model_noise=(not self.ir_redux),
                                                std=self.std_redux, maskslits=self.maskslits,
                                                show_profile=self.show, show=self.show)

            # Purge out the negative objects if this was a near-IR reduction.
            # TODO should we move this purge call to local_skysub_extract??
            if self.ir_redux:
                self.sobjs.purge_neg()

            # Flexure correction if this is not a standard star
            if not self.std_redux:
                self.redux.flexure_correct(self.sobjs, self.basename)

            # Grab coord
            radec = ltu.radec_to_coord((self.fitstbl["ra"][frames[0]], self.fitstbl["dec"][frames[0]]))
            self.vel_corr = self.redux.helio_correct(self.sobjs, radec, self.obstime)

        else:
            # Print status message
            msgs_string = 'No objects to extract for target {:s}'.format(self.fitstbl['target'][frames[0]]) + msgs.newline()
            msgs_string += 'On frames:' + msgs.newline()
            for iframe in frames:
                msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
            msgs.warn(msgs_string)
            # set to first pass global sky
            self.skymodel = self.initial_sky
            self.objmodel = np.zeros_like(self.sciimg)
            # Set to sciivar. Could create a model but what is the point?
            self.ivarmodel = np.copy(self.sciivar)
            # Set to the initial mask in case no objects were found
            self.outmask = self.redux.mask
            # empty specobjs object from object finding
            if self.ir_redux:
                self.sobjs_obj.purge_neg()
            self.sobjs = self.sobjs_obj
            self.vel_corr = None

        return self.sciimg, self.sciivar, self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs, self.vel_corr

    # TODO: Why not use self.frame?
    def save_exposure(self, frame, sci_dict, basename):
        """
        Save the outputs from extraction for a given exposure

        Args:
            frame (:obj:`int`):
              0-indexed row in the metadata table with the frame that
              has been reduced.
            sci_dict (:obj:`dict`):
              Dictionary containing the primary outputs of extraction
            basename (:obj:`str`):
                The root name for the output file.

        Returns:
            None

        """
        # TODO: Need some checks here that the exposure has been reduced

        # Determine the headers
        head1d = self.fitstbl[frame]
        # Need raw file header information
        rawfile = self.fitstbl.frame_paths(frame)
        head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext)
        refframe = 'pixel' if self.caliBrate.par['wavelengths']['reference'] == 'pixel' else \
            self.caliBrate.par['wavelengths']['frame']

        # Determine the paths/filenames
        scipath = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'])

        save.save_all(sci_dict, self.caliBrate.master_key_dict, self.caliBrate.master_dir, self.spectrograph,
                      head1d, head2d, scipath, basename, refframe=refframe,
                      update_det=self.par['rdx']['detnum'], binning=self.fitstbl['binning'][frame])

        return



    def msgs_reset(self):
        """
        Reset the msgs object
        """

        # Reset the global logger
        msgs.reset(log=self.logname, verbosity=self.verbosity)
        msgs.pypeit_file = self.pypeit_file

    def print_end_time(self):
        """
        Print the elapsed time
        """
        # Capture the end time and print it to user
        tend = time.time()
        codetime = tend-self.tstart
        if codetime < 60.0:
            msgs.info('Execution time: {0:.2f}s'.format(codetime))
        elif codetime/60.0 < 60.0:
            mns = int(codetime/60.0)
            scs = codetime - 60.0*mns
            msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs))
        else:
            hrs = int(codetime/3600.0)
            mns = int(60.0*(codetime/3600.0 - hrs))
            scs = codetime - 60.0*mns - 3600.0*hrs
            msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format(hrs, mns, scs))

    # TODO: Move this to fitstbl?
    def show_science(self):
        """
        Simple print of science frames
        """
        indx = self.fitstbl.find_frames('science')
        print(self.fitstbl[['target','ra','dec','exptime','dispname']][indx])

    def __repr__(self):
        # Generate sets string
        return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
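For context, here is a minimal usage sketch of the class above. The import path is assumed from the file/project listed in the header, and the .pypeit and log file names are hypothetical; a real reduction would normally go through PypeIt's run_pypeit command-line script rather than calling the class directly.

# Minimal usage sketch (hypothetical file names; the import path is assumed
# from the file/project listed above).
from pypeit.pypeit import PypeIt

pypeIt = PypeIt('keck_lris_blue_A.pypeit',      # hypothetical PypeIt file
                verbosity=2,
                overwrite=True,
                reuse_masters=True,             # reuse existing calibration masters
                logname='keck_lris_blue_A.log')
pypeIt.reduce_all()   # calibrate, extract, and save all standard/science frames
pypeIt.build_qa()     # generate the QA HTML wrappers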
Code example #2
class PypeItSetup(object):
    """
    Prepare for a pypeit run.

    .. todo::
        - This is now mostly a wrapper for PypeItMetaData.  Should we
          remove this class, or merge PypeItSetup and PypeItMetaData?

    The main deliverables are the set of parameters used for pypeit's
    algorithms (:attr:`par`), an :obj:`astropy.table.Table` with the
    details of the files to be reduced (:attr:`fitstbl`), and a
    dictionary with the list of instrument setups.

    Args:
        file_list (list):
            A list of strings with the full path to each file to be
            reduced.
        frametype (:obj:`dict`, optional):
            A dictionary that associates the name of the file (just the
            fits file name without the full path) to a specific frame
            type (e.g., arc, bias, etc.).  If None, this is determined
            by the :func:`get_frame_types` method.
        usrdata (:obj:`astropy.table.Table`, optional):
            A user provided set of data used to supplement or overwrite
            metadata read from the file headers.  The table must have a
            `filename` column that is used to match to the metadata
            table generated within PypeIt.
        setups (:obj:`list`, optional):
            A list of setups that each file can be associated with.  If
            None, all files are expected to be for a single setup.
        cfg_lines (:obj:`list`, optional):
            A list of strings that provide a set of user-defined
            parameters for executing pypeit.  These are the lines of a
            configuration file.  See the documentation for the
            `configobj`_ package.  One of the user-level inputs should
            be the spectrograph that provided the data to be reduced.
            One can get the list of spectrographs currently served by
            running::
                
                from pypeit.spectrographs.util import valid_spectrographs
                print(valid_spectrographs())

            To use all the default parameters when reducing data from a
            given spectrograph, you can set `cfg_lines = None`, but you
            then *must* provide the `spectrograph_name`.
        spectrograph_name (:obj:`str`, optional):
            If not providing a list of configuration lines
            (`cfg_lines`), this sets the spectrograph.  The spectrograph
            defined in `cfg_lines` takes precedence over anything
            provided by this argument.
        pypeit_file (:obj:`str`, optional):
            The name of the pypeit file used to instantiate the
            reduction.  This can be None, and will lead to default names
            for output files (TODO: Give list).  Setting `pypeit_file`
            here *only sets the name of the file*.  To instantiate a
            `PypeItSetup` object directly from a pypeit file (i.e. by
            reading the file), use the :func:`from_pypeit_file` method;
            i.e.::
                
                setup = PypeItSetup.from_pypeit_file('myfile.pypeit')

    Attributes:
        file_list (list):
            See description of class argument.
        frametype (dict):
            See description of class argument.
        setups (list):
            See description of class argument.
        pypeit_file (str):
            See description of class argument.
        spectrograph (:class:`pypeit.spectrographs.spectrograph.Spectrograph`):
            An instance of the `Spectrograph` class used throughout the
            reduction procedures.
        par (:class:`pypeit.par.pypeitpar.PypeItPar`):
            An instance of the `PypeItPar` class that provides the
            parameters to all the algorithms that pypeit uses to reduce
            the data.
        fitstbl (:class:`pypeit.metadata.PypeItMetaData`):
            A `Table` that provides the salient metadata for the fits
            files to be reduced.
        setup_dict (dict):
            The dictionary with the list of instrument setups.
        steps (list):
            The steps run to provide the pypeit setup.

    .. _configobj: http://configobj.readthedocs.io/en/latest/
    """
    def __init__(self,
                 file_list,
                 path=None,
                 frametype=None,
                 usrdata=None,
                 setups=None,
                 cfg_lines=None,
                 spectrograph_name=None,
                 pypeit_file=None):

        # The provided list of files cannot be None
        if file_list is None or len(file_list) == 0:
            msgs.error('Must provide a list of files to be reduced!')

        # Save input
        self.file_list = file_list
        self.path = os.getcwd() if path is None else path
        self.frametype = frametype
        self.usrdata = usrdata
        self.setups = setups
        self.pypeit_file = pypeit_file
        self.user_cfg = cfg_lines

        # Determine the spectrograph name
        _spectrograph_name = spectrograph_name if cfg_lines is None \
                    else PypeItPar.from_cfg_lines(merge_with=cfg_lines)['rdx']['spectrograph']

        # Cannot proceed without spectrograph name
        if _spectrograph_name is None:
            msgs.error(
                'Must provide spectrograph name directly or using configuration lines.'
            )

        # Instantiate the spectrograph
        self.spectrograph = load_spectrograph(_spectrograph_name)

        # Get the spectrograph specific configuration to be merged with
        # the user modifications.
        spectrograph_cfg_lines = self.spectrograph.default_pypeit_par(
        ).to_config()

        # Instantiate the pypeit parameters.  The user input
        # configuration (cfg_lines) can be None.
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)

        # Prepare internals for execution
        self.fitstbl = None
        self.setup_dict = None
        self.steps = []

    @classmethod
    def from_pypeit_file(cls, filename):
        """
        Instantiate the :class:`PypeItSetup` object using a pypeit file.

        Args:
            filename (str):
                Name of the pypeit file to read.  PypeIt files have a
                specific set of valid formats. A description can be
                found `here`_ (include doc link).
        
        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(
            filename)
        return cls(data_files,
                   frametype=frametype,
                   usrdata=usrdata,
                   setups=setups,
                   cfg_lines=cfg_lines,
                   pypeit_file=filename)

    @classmethod
    def from_file_root(cls,
                       root,
                       spectrograph,
                       extension='.fits',
                       output_path=None):
        """
        Instantiate the :class:`PypeItSetup` object by providing a file
        root.
        
        This is based on first writing a vanilla PypeIt file for the
        provided spectrograph and extension to a file in the provided
        path.

        Args:
            root (:obj:`str`):
                The root path to all the files for PypeIt to reduce.
                This should be everything up to the wild-card before the
                file extension to use to find the relevant files.  The
                root itself can have wild cards to read through multiple
                directories.
            spectrograph (:obj:`str`):
                The PypeIt name of the spectrograph used to take the
                observations.  This should be one of the available
                options in
                :func:`pypeit.spectrographs.valid_spectrographs`.
            extension (:obj:`str`, optional):
                The extension common to all the fits files to reduce.
                Default is '.fits', meaning anything with `root*.fits*`
                will be included.
            output_path (:obj:`str`, optional):
                Path to use for the output.  If None, the default is
                './setup_files'.  If the path doesn't yet exist, it is
                created.
        
        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        # Set the output directory
        outdir = os.path.join(
            os.getcwd(), 'setup_files') if output_path is None else output_path
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # Set the output file name
        date = str(datetime.date.today().strftime('%Y-%b-%d'))
        pypeit_file = os.path.join(outdir,
                                   '{0}_{1}.pypeit'.format(spectrograph, date))
        msgs.info('A vanilla pypeit file will be written to: {0}'.format(
            pypeit_file))

        # Generate the pypeit file
        cls.vanilla_pypeit_file(pypeit_file,
                                root,
                                spectrograph,
                                extension=extension)

        # Now setup PypeIt using that file
        return cls.from_pypeit_file(pypeit_file)

    @staticmethod
    def vanilla_pypeit_file(pypeit_file,
                            root,
                            spectrograph,
                            extension='.fits'):
        """
        Write a vanilla PypeIt file.

        Args:
            pypeit_file (str):
              Name of PypeIt file
            root (str):
              Root path (or filename prefix) of the raw data files
            spectrograph (str):
              Name of spectrograph
            extension (str, optional):
              File extension

        """
        # Generate
        dfname = os.path.join(root, '*{0}*'.format(extension)) \
                    if os.path.isdir(root) else '{0}*{1}*'.format(root, extension)
        # configuration lines
        cfg_lines = ['[rdx]']
        cfg_lines += ['    spectrograph = {0}'.format(spectrograph)]
        #        cfg_lines += ['    sortroot = {0}'.format(root)]
        make_pypeit_file(pypeit_file,
                         spectrograph, [dfname],
                         cfg_lines=cfg_lines,
                         setup_mode=True)

    @property
    def nfiles(self):
        """The number of files to reduce."""
        if self.fitstbl is None:
            msgs.warn('No fits files have been read!')
        return 0 if self.fitstbl is None else len(self.fitstbl)

    def __repr__(self):
        return '<{:s}: nfiles={:d}>'.format(self.__class__.__name__,
                                            self.nfiles)

    def build_fitstbl(self, strict=True):
        """
        Construct the table with metadata for the frames to reduce.

        Largely a wrapper for instantiating :class:`pypeit.metadata.PypeItMetaData`.

        Args:
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read the headers of any of the files in
                :attr:`file_list`.  Set to False to only report a
                warning and continue.

        Returns:
            :obj:`astropy.table.Table`: Table with the metadata for each
            fits file to reduce.  Note this is different from
            :attr:`fitstbl` which is a :obj:`PypeItMetaData` object
        """
        # Build and sort the table
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      par=self.par,
                                      file_list=self.file_list,
                                      usrdata=self.usrdata,
                                      strict=strict)
        # Sort by the time
        if 'time' in self.fitstbl.keys():
            self.fitstbl.sort('time')

        # Add this to the completed steps
        self.steps.append(inspect.stack()[0][3])

        # Return the table
        return self.fitstbl.table

    def match_ABBA(self):
        """
        Matches science frames to their partner A/B frame.
        Mainly a wrapper for :func:`PypeItMetaData.match_ABBA`.

        Returns:
            :obj:`PypeItMetaData`: self.fitstbl -- Updated with 'AB_frame' column

        """
        self.fitstbl.match_ABBA()
        self.steps.append(inspect.stack()[0][3])
        return self.fitstbl

    def get_frame_types(self, flag_unknown=False, use_header_id=False):
        """
        Include the frame types in the metadata table.

        This is mainly a wrapper for
        :func:`PypeItMetaData.get_frame_types`.

        .. warning::

            Because this merges the frame types with the existing
            :attr:`fitstbl` this should only be run once.

        Args:
            flag_unknown (:obj:`bool`, optional):
                Allow for frames to have unknown types instead of
                crashing.  This should be True for initial setup and
                False otherwise.  Passed to get_frame_types()
            use_header_id (bool, optional):
                Passed to get_frame_types()

        """
        # Use PypeItMetaData methods to get the frame types
        _ = self.fitstbl.get_frame_types(flag_unknown=flag_unknown,
                                         user=self.frametype,
                                         useIDname=use_header_id)
        # Include finished processing step
        self.steps.append(inspect.stack()[0][3])

    def load_metadata(self, fits_file):
        """
        Load the fitstbl from disk (a binary FITS table)

        Args:
            fits_file (str):  Name of PypeItMetaData file

        Returns:
            :obj:`astropy.table.Table`: The table of file metadata held
            by the so-called fitstbl

        """
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      data=Table.read(fits_file))
        msgs.info("Loaded fitstbl from {:s}".format(fits_file))
        return self.fitstbl.table

    def write_metadata(self, sort_dir=None, ofile=None):
        """
        Write the :class:`astropy.table.Table` object in :attr:`fitstbl`
        to a file.

        If an output file is provided, the file is used.  If that file
        name includes '.fits', the output will be a fits file; otherwise
        the format is ascii.fixed_width.

        If no output file, the default is an ascii table with an '.lst'
        extension.  The root name of the file is either the spectrograph
        name or the root name of the pypeit file, if the latter exists.

        If a `sort_dir` is provided, the directory of the nominal output
        file is changed to this directory.

        Args:
            sort_dir (:obj:`str`, optional):
                The directory in which to write the metadata table
                ('.lst') file.
            ofile (:obj:`str`, optional):
                The name of the file to write.  See description above.
        """
        if ofile is None:
            ofile = self.spectrograph.spectrograph + '.lst' if self.pypeit_file is None \
                        else self.pypeit_file.replace('.pypeit', '.lst')
            if sort_dir is not None:
                ofile = os.path.join(sort_dir, os.path.split(ofile)[1])

        format = None if '.fits' in ofile else 'ascii.fixed_width'
        self.fitstbl.write(
            ofile,
            #columns=None if format is None else self.spectrograph.pypeit_file_keys(),
            format=format,
            overwrite=True)

    def run(self,
            setup_only=False,
            calibration_check=False,
            use_header_id=False,
            sort_dir=None,
            write_bkg_pairs=False):
        """
        Once instantiated, this is the main method used to construct the
        object.
        
        The code flow is as follows::
            - Build the fitstbl from an input file_list (optional)
            - Type the files (bias, arc, etc.)
            - Match calibration files to the science files
            - Generate the setup_dict
                - Write group info to disk
                - Write calib info to disk (if main run)

        It is expected that a user will run this three times if they're
        being careful.  Once with `setup_only=True` to confirm the
        images are properly typed and grouped together for calibration.
        A second time with `calibration_check=True` to confirm the
        appropriate calibrations frames are available.  And a third time
        to do the actual setup before proceeding with the reductions.

        Args:
            setup_only (:obj:`bool`, optional):
                Only this setup will be performed.  PypeIt is expected to
                execute in a way that ends after this class is fully
                instantiated such that the user can inspect the results
                before proceeding.  This has the effect of providing
                more output describing the success of the setup and how
                to proceed, and provides warnings (instead of errors)
                for issues that may cause the reduction itself to fail.
            calibration_check (:obj:`bool`, optional):
                Only check that the calibration frames are appropriately
                set up and exist on disk.  PypeIt is expected to execute
                in a way that ends after this class is fully
                instantiated such that the user can inspect the results
                before proceeding. 
            use_header_id (:obj:`bool`, optional):
                Allow setup to use the frame types drawn from single,
                instrument-specific header keywords set to `idname` in
                the metadata table (:attr:`fitstbl`).
            sort_dir (:obj:`str`, optional):
                The directory to put the '.sorted' file.
            write_bkg_pairs (:obj:`bool`, optional):
                Passed directly to :func:`PypeItMetaData.write_sorted`
                when writing the '.sorted' file.

        Returns:
            :class:`pypeit.par.pypeitpar.PypeItPar`,
            :class:`pypeit.spectrographs.spectrograph.Spectrograph`,
            :class:`astropy.table.Table`: Returns the attributes
            :attr:`par`, :attr:`spectrograph`, :attr:`fitstbl`.
            If running with `setup_only=True`, these are all returned as
            `None` values.
        """
        # Kludge
        pypeit_file = '' if self.pypeit_file is None else self.pypeit_file

        # Build fitstbl
        if self.fitstbl is None:
            self.build_fitstbl(strict=not setup_only)  #, bkg_pairs=bkg_pairs)

        # File typing
        self.get_frame_types(flag_unknown=setup_only or calibration_check,
                             use_header_id=use_header_id)

        # Determine the configurations and assign each frame to the
        # specified configuration
        ignore_frames = ['bias', 'dark']
        cfgs = self.fitstbl.unique_configurations(ignore_frames=ignore_frames)
        self.fitstbl.set_configurations(cfgs, ignore_frames=ignore_frames)

        # Assign frames to calibration groups
        self.fitstbl.set_calibration_groups(global_frames=['bias', 'dark'])

        # Set default comb_id (only done if needed)
        self.fitstbl.set_defaults()

        # Assign science IDs based on the calibrations groups (to be
        # deprecated)
        #self.fitstbl.calib_to_science()
        self.fitstbl['failures'] = False  # TODO: placeholder

        #        if self.par['scienceimage'] is not None and self.par['scienceimage']['nodding']:
        #            self.match_ABBA()

        if setup_only:
            # Collate all matching files and write .sorted Table (on pypeit_setup only)
            sorted_file = self.spectrograph.spectrograph + '.sorted' \
                                if pypeit_file is None or len(pypeit_file) == 0 \
                                else pypeit_file.replace('.pypeit', '.sorted')
            if sort_dir is not None:
                sorted_file = os.path.join(sort_dir,
                                           os.path.split(sorted_file)[1])
            self.fitstbl.write_sorted(sorted_file,
                                      write_bkg_pairs=write_bkg_pairs)
            msgs.info("Wrote sorted file data to {:s}".format(sorted_file))

        else:
            # Write the calib file
            calib_file = self.spectrograph.spectrograph + '.calib' \
                                if pypeit_file is None or len(pypeit_file) == 0 \
                                else pypeit_file.replace('.pypeit', '.calib')
            if sort_dir is not None:
                calib_file = os.path.join(sort_dir,
                                          os.path.split(calib_file)[1])
            self.fitstbl.write_calib(calib_file)

        # Finish (depends on PypeIt run mode)
        # TODO: Do we need this functionality?
        if calibration_check:
            msgs.info("Inspect the .calib file: {:s}".format(calib_file))
            msgs.info(
                "*********************************************************")
            msgs.info("Calibration check complete and successful!")
            #            msgs.info("Set 'run calcheck False' to continue with data reduction")
            msgs.info(
                "*********************************************************")
            #TODO: Why should this not return the same as when setup_only is True

        if setup_only:
            for idx in np.where(self.fitstbl['failures'])[0]:
                msgs.warn(
                    "No Arc found: Skipping object {:s} with file {:s}".format(
                        self.fitstbl['target'][idx],
                        self.fitstbl['filename'][idx]))
            msgs.info("Setup is complete.")
            msgs.info("Inspect the .sorted file")
            return None, None, None

        return self.par, self.spectrograph, self.fitstbl
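A sketch of the three-pass workflow described in the run() docstring, assuming the class lives in pypeit.pypeitsetup; the raw-data root, spectrograph name, and file names below are hypothetical.

# Sketch of the setup workflow described above (hypothetical names and paths;
# the import path is assumed from the class name).
from pypeit.pypeitsetup import PypeItSetup

# Pass 1: build a vanilla .pypeit file from a raw-data root and inspect the
# frame typing/grouping written to the .sorted file.
ps = PypeItSetup.from_file_root('/path/to/raw/lris_blue',   # hypothetical root
                                'keck_lris_blue',           # hypothetical spectrograph
                                extension='.fits')
ps.run(setup_only=True)          # writes the .sorted file; returns (None, None, None)

# Pass 2: after editing the .pypeit file, confirm the calibration frames exist.
ps = PypeItSetup.from_pypeit_file('keck_lris_blue_A.pypeit')  # hypothetical file
par, spectrograph, fitstbl = ps.run(calibration_check=True)

# Pass 3: the full setup, recovering the parameters, spectrograph, and metadata.
par, spectrograph, fitstbl = ps.run()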