Example 1
    def get_master_dirkey(self, iFile=None):
        # Do some checks
        if self.spectrograph is None or self.par is None:
            msgs.error('Spectrograph and parameters must be set before getting the master key.')
        idx = self.check_index(iFile)
        # Get the master dir
        if self.par['calibrations']['master_dir'] == 'default':
            mdir = os.path.join(self.par['rdx']['redux_path'], 'Masters')
        else:
            mdir = self.par['calibrations']['master_dir']

        if not os.path.exists(mdir):
            mdir_base = os.path.join(os.getcwd(), os.path.basename(mdir))
            msgs.warn('Master file dir: {0} does not exist. Using {1}'.format(
                mdir, mdir_base))
            mdir = mdir_base
        # Get the base name
        file_base = self.get_basename(iFile=iFile)
        ftdict = dict({file_base: 'science'})
        fitstbl = PypeItMetaData(self.spectrograph,
                                 self.par,
                                 files=[self.data_files[idx]],
                                 usrdata=Table(self.usrdata[idx]),
                                 strict=True)
        fitstbl.finalize_usr_build(ftdict, self.setups[0])
        mkey = fitstbl.master_key(0, det=self.det)
        # Return the result
        return mdir, mkey
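A hedged usage sketch (the caller object setup_obj and the file index are placeholders, and the pypeit.masterframe import path is an assumption): the returned directory and key can be combined into a master calibration file name, following the same MasterFrame.construct_file_name pattern used in Example 6 below.

import os
from pypeit.masterframe import MasterFrame  # import path assumed

# setup_obj is a hypothetical instance of the class defining get_master_dirkey()
mdir, mkey = setup_obj.get_master_dirkey(iFile=0)
# Build the slit-edge master file name for this key (same pattern as Example 6)
trc_file = os.path.join(mdir, MasterFrame.construct_file_name('Edges', mkey)) + '.gz'
print(trc_file)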
Example 2
def test_lris_blue_pypeit_overwrite():
    f = os.path.join(os.environ['PYPEIT_DEV'],
                     'pypeit_files/keck_lris_blue_long_400_3400_d560.pypeit')
    assert os.path.isfile(f), 'Could not find pypeit file.'

    cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(
        f, file_check=False)

    # Change the dev path
    for i in range(len(data_files)):
        path_list = data_files[i].split('/')
        for j, p in enumerate(path_list):
            if p == 'RAW_DATA':
                break
        data_files[i] = os.path.join(os.environ['PYPEIT_DEV'],
                                     '/'.join(path_list[j:]))

    # Read the fits table with and without the user data
    spectrograph = load_spectrograph('keck_lris_blue')
    par = spectrograph.default_pypeit_par()
    fitstbl = PypeItMetaData(spectrograph, par, files=data_files)
    fitstbl_usr = PypeItMetaData(spectrograph,
                                 par,
                                 files=data_files,
                                 usrdata=usrdata)

    assert fitstbl['target'][
        0] == 'unknown', 'Grating name changed in file header'
    assert fitstbl_usr['target'][
        0] == 'test', 'Grating name changed in pypeit file'
    assert fitstbl['target'][0] != fitstbl_usr['target'][0], \
            'Fits header value and input pypeit file value expected to be different.'
Example 3
    def validate_metadata(self):
        """
        Validates the meta definitions of the Spectrograph
        by making a series of comparisons to the meta data model
        definied in metadata.py

        Returns:

        """
        # Load up
        core_meta = PypeItMetaData.define_core_meta()
        meta_data_model = PypeItMetaData.get_meta_data_model()
        # Check core
        for key in core_meta:
            assert key in self.meta.keys(
            ), 'key {:s} not defined in spectrograph meta!'.format(key)
        # Check for rtol for config keys that are type float
        for key in self.configuration_keys():
            if meta_data_model[key]['dtype'] in [float]:
                assert 'rtol' in self.meta[key].keys(
                ), 'rtol not set for key {:s} not defined in spectrograph meta!'.format(
                    key)
        # Now confirm all meta are in the data model
        for key in self.meta.keys():
            if key not in self.meta_data_model.keys():
                msgs.error("Meta data {:s} not in meta_data_model".format(key))
Example 4
    def build_fitstbl(self, strict=True):
        """
        Construct the table with metadata for the frames to reduce.

        Largely a wrapper for :func:`pypeit.core.load.create_fitstbl`.

        Args:
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read the headers of any of the files in
                :attr:`file_list`.  Set to False to only report a
                warning and continue.

        Returns:
            :obj:`astropy.table.Table`: Table with the metadata for each
            fits file to reduce.  Note this is different from
            :attr:`fitstbl`, which is a :obj:`PypeItMetaData` object.
        """
        # Build and sort the table
        self.fitstbl = PypeItMetaData(self.spectrograph, par=self.par, files=self.file_list,
                                      usrdata=self.usrdata, strict=strict)
        # Sort by the time
        if 'time' in self.fitstbl.keys():
            self.fitstbl.sort('time')

        # Add this to the completed steps
        self.steps.append(inspect.stack()[0][3])

        # Return the table
        return self.fitstbl.table
Example 5
def read_old_fitstbl(spectrograph, f):
    # Read the old-format table directly into a PypeItMetaData object
    fitstbl = PypeItMetaData(spectrograph, data=Table.read(f))
    # Rebuild the frame-type bit values from the old boolean frame-type columns
    type_bits = np.zeros(len(fitstbl), dtype=fitstbl.bitmask.minimum_dtype())
    for bit in fitstbl.bitmask.keys():
        type_bits[fitstbl[bit]] = fitstbl.bitmask.turn_on(
            type_bits[fitstbl[bit]], flag=bit)
    # Assign the reconstructed frame types to the metadata table
    fitstbl.set_frame_types(type_bits)
    return fitstbl
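A short usage sketch (the file name is hypothetical): the helper rebuilds the frame-type bits from the old boolean columns before handing back the metadata object.

# Assumes load_spectrograph is imported as in the surrounding examples
spectrograph = load_spectrograph('shane_kast_blue')
fitstbl = read_old_fitstbl(spectrograph, 'old_format_fitstbl.fits')  # hypothetical file
print(len(fitstbl), 'frames with frame types set')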
Example 6
def main(args):

    # Load fits file
    cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(args.file, runtime=False)

    # Get the raw fits file name
    sciIdx = get_science_frame(usrdata)
    fname = data_files[sciIdx]

    # Load the spectrograph
    cfg = ConfigObj(cfg_lines)
    spectrograph_name = cfg['rdx']['spectrograph']
    spectrograph = load_spectrograph(spectrograph_name, ifile=data_files[sciIdx])
    msgs.info('Loaded spectrograph {0}'.format(spectrograph.spectrograph))
    spectrograph_cfg_lines = spectrograph.config_specific_par(fname).to_config()
    par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

    # Get the master key
    file_base = os.path.basename(fname)
    ftdict = dict({file_base: 'science'})
    fitstbl = PypeItMetaData(spectrograph, par, files=[data_files[sciIdx]], usrdata=Table(usrdata[sciIdx]), strict=True)
    fitstbl.finalize_usr_build(ftdict, setups[0])
    mkey = fitstbl.master_key(0, det=args.det)

    # Load the frame data
    rawimage, _, _, datasec, _ = spectrograph.get_rawimage(fname, args.det)
    rawimage = procimg.trim_frame(rawimage, datasec < 1)
    frame = spectrograph.orient_image(rawimage, args.det)

    # Set paths
    if par['calibrations']['caldir'] == 'default':
        mdir = os.path.join(par['rdx']['redux_path'], 'Masters')
    else:
        mdir = par['calibrations']['caldir']

    if not os.path.exists(mdir):
        mdir_base = os.path.join(os.getcwd(), os.path.basename(mdir))
        msgs.warn('Master file dir: {0} does not exist. Using {1}'.format(mdir, mdir_base))
        mdir = mdir_base

    # Load the tslits_dict
    trc_file = os.path.join(mdir, MasterFrame.construct_file_name('Edges', mkey)) + '.gz'
    tslits_dict = edgetrace.EdgeTraceSet.from_file(trc_file).convert_to_tslits_dict()

    # Derive an appropriate output filename
    prefix = os.path.splitext(file_base)
    if prefix[1] == ".gz":
        prefix = os.path.splitext(prefix[0])[0]
    else:
        prefix = prefix[0]
    outname = "{0:s}_skyregions.fits".format(prefix)

    # Finally, initialise the GUI
    skyreg = SkySubGUI.initialize(args.det, frame, tslits_dict, outname=outname, runtime=False, printout=True)

    # Get the results
    skyreg.get_result()
Example 7
def test_add_reduce():
    """Test adding history entries for reducing an exposure"""

    # Create a PypeItMetadata object to simulate
    # what would be created in PypeIt::reduce_all
    # This will contain two calibration groups, one
    # that combines two frames and has no calibration
    # frames, and another that combines and subtracts
    # frames and has calibration frames
    spectrograph = load_spectrograph('keck_deimos')
    spectrograph_def_par = spectrograph.default_pypeit_par()
    ftype_bitmask = FrameTypeBitMask()

    science_value = ftype_bitmask.turn_on(np.int_(0),'science')
    standard_value = ftype_bitmask.turn_on(np.int_(0),'standard')
    arc_value = ftype_bitmask.turn_on(np.int_(0),'arc')
    tilt_arc_science_value = ftype_bitmask.turn_on(np.int_(0), 'tilt') | arc_value | science_value

    t = Table(names=['filename',  'frametype',       'framebit',             'target', 'calib', 'comb_id', 'bkg_id'],
              rows=[['file1.fits','science',         science_value,          'targ1',  '1',     '-1',       '-1'],
                    ['file2.fits','standard',        standard_value,         'targ2',  '1',     '-1',       '-1'],
                    ['file3.fits','tilt/arc/science',tilt_arc_science_value, 'targ3',  '2',     '2',        '3'],
                    ['file4.fits','tilt/arc/science',tilt_arc_science_value, 'targ4',  '2',     '3',        '2'], 
                    ['file5.fits','tilt/arc/science',tilt_arc_science_value, 'targ3',  '2',     '2',        '3'],
                    ['file6.fits','tilt/arc/science',tilt_arc_science_value, 'targ4',  '2',     '3',        '2'],
                    ['file7.fits','arc',             arc_value,              'targ5',  '2',     '-1',       '-1']])

    metadata = PypeItMetaData(spectrograph, spectrograph_def_par, data=t)
    metadata.set_calibration_groups()

    #Create history entries for both calibration groups
    history = History()
    history.add_reduce(1, metadata, [0], [])
    history.add_reduce(2, metadata, [2, 4], [3, 5])

    # Verify we get the expected entries
    expected_history = [' PypeIt Reducing target targ1',
                        'Combining frames:', 
                        '"file1.fits"', 
                        ' PypeIt Reducing target targ3',
                        'Combining frames:', 
                        '"file3.fits"', 
                        '"file5.fits"', 
                        'Subtracted background from frames:', 
                        '"file4.fits"', 
                        '"file6.fits"',
                        'Callibration frames:', 
                        'tilt/arc/science "file3.fits"', 
                        'tilt/arc/science "file4.fits"', 
                        'tilt/arc/science "file5.fits"', 
                        'tilt/arc/science "file6.fits"', 
                        'arc "file7.fits"']

    
    assert verify_history(history, expected_history)
Example 8
def test_setup_keck_deimos_multiconfig():

    root = os.path.join(os.environ['PYPEIT_DEV'], 'RAW_DATA', 'keck_deimos')
    files = glob.glob(os.path.join(root, '830G_L_8100', '*fits*'))
    files += glob.glob(os.path.join(root, '830G_L_8400', '*fits*'))

    output_path = os.path.join(os.getcwd(), 'output')
    if os.path.isdir(output_path):
        shutil.rmtree(output_path)
    os.makedirs(output_path)

    ps = pypeitsetup.PypeItSetup(files, spectrograph_name='keck_deimos')
    ps.run(setup_only=True, sort_dir=output_path)
    # Write the automatically generated pypeit data
    pypeit_files = ps.fitstbl.write_pypeit(output_path,
                                           cfg_lines=ps.user_cfg,
                                           write_bkg_pairs=True)

    assert len(pypeit_files) == 2, 'Should have created two pypeit files'

    # Test the pypeit files for the correct configuration and
    # calibration group results
    for f, s, c in zip(pypeit_files, ['A', 'B'], ['0', '1']):

        # TODO: All of this front-end stuff, pulled from pypeit.py, should
        # be put into a function.

        # Read the pypeit file
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(
            f, runtime=True)
        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph = load_spectrograph(cfg['rdx']['spectrograph'])
        # Configuration-specific parameters
        for idx, row in enumerate(usrdata):
            if 'science' in row['frametype'] or 'standard' in row['frametype']:
                break
        spectrograph_cfg_lines = spectrograph.config_specific_par(
            data_files[idx]).to_config()
        #  PypeIt parameters
        par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                       merge_with=cfg_lines)
        #  Metadata
        fitstbl = PypeItMetaData(spectrograph,
                                 par,
                                 files=data_files,
                                 usrdata=usrdata,
                                 strict=True)
        fitstbl.finalize_usr_build(frametype, setups[0])

        assert np.all(fitstbl['setup'] == s), 'Setup is wrong'
        assert np.all(fitstbl['calib'] == c), 'Calibration group is wrong'

    # Clean-up
    shutil.rmtree(output_path)
Example 9
    def load_metadata(self, fits_file):
        """
        Load the fitstbl from disk (a binary FITS table)

        Args:
            fits_file (str):  Name of PypeItMetaData file

        Returns:
            :obj:`astropy.table.Table`: The table from the loaded
            :obj:`PypeItMetaData` object (the so-called fitstbl)

        """
        self.fitstbl = PypeItMetaData(self.spectrograph, self.par, data=Table.read(fits_file))
        msgs.info("Loaded fitstbl from {:s}".format(fits_file))
        return self.fitstbl.table
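An illustrative call (assuming ps is an instance of the class defining load_metadata, e.g. a PypeItSetup-like object, and the file name is hypothetical):

tbl = ps.load_metadata('keck_deimos_setup_A.fits')  # hypothetical metadata file
print(tbl.colnames)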
Example 10
def test_lris_blue_pypeit_overwrite():

    # JFH The RAW_DATA is at PYPEIT_DEV, but the github DEV suite where
    # the pypeit files are at a different path. To fix this, I've just
    # made pypeit_files directory in Cooked and copied this file over.

    # KBW: I'd prefer that you put symlinks in the github dev suite:
    # e.g.:
    #   cd $PYPEIT_DEV
    #   ln -s /Volumes/GoogleDrive/Team\ Drives/PHYS-GP-Hennawi/PypeIt/PypeIt-development-suite/RAW_DATA  RAW_DATA

    # Read the dev suite pypeit file
    f = os.path.join(
        os.environ['PYPEIT_DEV'],
        'Cooked/pypeit_files/keck_lris_blue_long_400_3400_d560.pypeit')
    if not os.path.isfile(f):
        f = os.path.join(
            os.environ['PYPEIT_DEV'],
            'pypeit_files/keck_lris_blue_long_400_3400_d560.pypeit')
    assert os.path.isfile(f), 'Could not find pypeit file.'

    cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(
        f, file_check=False)

    # Change the dev path
    for i in range(len(data_files)):
        path_list = data_files[i].split('/')
        for j, p in enumerate(path_list):
            if p == 'RAW_DATA':
                break
        data_files[i] = os.path.join(os.environ['PYPEIT_DEV'],
                                     '/'.join(path_list[j:]))

    # Read the fits table with and without the user data
    spectrograph = load_spectrograph('keck_lris_blue')
    par = spectrograph.default_pypeit_par()
    fitstbl = PypeItMetaData(spectrograph, par, file_list=data_files)
    fitstbl_usr = PypeItMetaData(spectrograph,
                                 par,
                                 file_list=data_files,
                                 usrdata=usrdata)

    assert fitstbl['target'][
        0] == 'unknown', 'Grating name changed in file header'
    assert fitstbl_usr['target'][
        0] == 'test', 'Grating name changed in pypeit file'
    assert fitstbl['target'][0] != fitstbl_usr['target'][0], \
            'Fits header value and input pypeit file value expected to be different.'
Example 11
    def __init__(self):
        self.spectrograph = 'base'
        self.telescope = None
        self.detector = None
        self.naxis = None
        #        self.raw_naxis = None
        self.datasec_img = None
        self.bpm_img = None

        # Default time unit
        self.timeunit = 'mjd'

        # Default extension with the primary header data
        #   used by arsave.save_2d_images
        self.primary_hdrext = 0
        self.numhead = 0

        self.minexp = 0  # NEED TO TIE TO INSTRUMENT PAR INSTEAD

        # Init Calibrations Par
        #        self._set_calib_par()

        # Init meta
        self.meta_data_model = PypeItMetaData.get_meta_data_model()
        self.init_meta()
        self.validate_metadata()
Example 12
    def header_cards_for_spec(self):
        """
        Define the header cards to be written to spec1d (and maybe spec2d) files.
        These refer to the keys in the fitstbl

        Returns:
            list: Keys for header cards of spec1d

        """
        core_meta = PypeItMetaData.define_core_meta()
        header_cards = list(core_meta.keys())
        # Add a few more
        header_cards += ['filename']  # For fluxing
        return header_cards
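A hedged sketch of how the returned card list might be used (the spectrograph instance and the row_fitstbl metadata row are placeholders): copy each listed metadata value into an output FITS header.

from astropy.io import fits

hdr = fits.Header()
# row_fitstbl stands in for one row of the PypeItMetaData table
for card in spectrograph.header_cards_for_spec():
    if card in row_fitstbl.colnames:
        hdr[card.upper()[:8]] = row_fitstbl[card]  # FITS keywords are limited to 8 characters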
Example 13
def test_read_combid():

    # ------------------------------------------------------------------
    # In case of failed tests
    setup_dir = data_path('setup_files')
    if os.path.isdir(setup_dir):
        shutil.rmtree(setup_dir)
    config_dir = data_path('shane_kast_blue_A')
    if os.path.isdir(config_dir):
        shutil.rmtree(config_dir)
    # ------------------------------------------------------------------

    # Generate the pypeit file with the comb_id
    droot = data_path('b')
    pargs = Setup.parse_args([
        '-r', droot, '-s', 'shane_kast_blue', '-c=all', '-b',
        '--extension=fits.gz', '--output_path={:s}'.format(data_path(''))
    ])
    Setup.main(pargs)
    shutil.rmtree(setup_dir)

    pypeit_file = os.path.join(config_dir, 'shane_kast_blue_A.pypeit')
    cfg_lines, data_files, frametype, usrdata, setups, _ = parse_pypeit_file(
        pypeit_file)

    # Get the spectrograph
    spectrograph = None
    for l in cfg_lines:
        if 'spectrograph' in l:
            spectrograph = load_spectrograph(l.split(' ')[-1])
            break
    assert spectrograph is not None, 'Did not appropriately read spectrograph'

    # Set the metadata
    pmd = PypeItMetaData(spectrograph,
                         spectrograph.default_pypeit_par(),
                         files=data_files,
                         usrdata=usrdata,
                         strict=False)

    indx = pmd['filename'] == 'b27.fits.gz'
    assert pmd['comb_id'][indx] == [1], 'Incorrect combination group ID'
    assert pmd['comb_id'][np.where(~indx)
                          [0]][0] == -1, 'Incorrect combination group ID'

    shutil.rmtree(config_dir)
Example 14
    def pypeit_file_keys(self):
        """
        Define the list of keys to be output into a standard PypeIt file

        Returns:
            pypeit_keys: list

        """
        pypeit_keys = ['filename', 'frametype']
        # Core
        core_meta = PypeItMetaData.define_core_meta()
        pypeit_keys += list(core_meta.keys())  # Might wish to order these
        # Add in config_keys (if new)
        for key in self.configuration_keys():
            if key not in pypeit_keys:
                pypeit_keys.append(key)
        # Finish
        return pypeit_keys
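An illustrative sketch (assuming a spectrograph instance and a populated metadata object fitstbl): the key list determines which columns appear in the data block of a written PypeIt file, so those columns can be previewed directly.

keys = spectrograph.pypeit_file_keys()
# Keep only the keys that are actually columns in the metadata table
present = [key for key in keys if key in fitstbl.keys()]
print(fitstbl.table[present])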
Example 15
class PypeIt(object):
    """
    This class runs the primary calibration and extraction in PypeIt

    .. todo::
        Fill in list of attributes!

    Args:
        pypeit_file (:obj:`str`):
            PypeIt filename.
        verbosity (:obj:`int`, optional):
            Verbosity level of system output.  Can be:

                - 0: No output
                - 1: Minimal output (default)
                - 2: All output

        overwrite (:obj:`bool`, optional):
            Flag to overwrite any existing files/directories.
        reuse_masters (:obj:`bool`, optional):
            Reuse any pre-existing calibration files
        logname (:obj:`str`, optional):
            The name of an ascii log file with the details of the
            reduction.
        show (:obj:`bool`, optional):
            Show reduction steps via plots (which will block further
            execution until clicked on) and outputs to ginga. Requires a
            remote-control ginga session via ``ginga --modules=RC &``.
        redux_path (:obj:`str`, optional):
            Over-ride the reduction path in the PypeIt file (e.g., for
            Notebook usage).
        calib_only (:obj:`bool`, optional):
            Only generate the calibration files that can be made from the
            provided frames.

    Attributes:
        pypeit_file (:obj:`str`):
            Name of the pypeit file to read.  PypeIt files have a
            specific set of valid formats. A description can be found
            :ref:`pypeit_file`.
        fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info

    """

    #    __metaclass__ = ABCMeta

    def __init__(self,
                 pypeit_file,
                 verbosity=2,
                 overwrite=True,
                 reuse_masters=False,
                 logname=None,
                 show=False,
                 redux_path=None,
                 calib_only=False):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups \
                = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file
        self.calib_only = calib_only

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)
        msgs.info('Loaded spectrograph {0}'.format(
            self.spectrograph.spectrograph))

        # --------------------------------------------------------------
        # Get the full set of PypeIt parameters
        #   - Grab a science or standard file for configuration specific parameters

        config_specific_file = None
        for idx, row in enumerate(usrdata):
            if ('science' in row['frametype']) or ('standard'
                                                   in row['frametype']):
                config_specific_file = data_files[idx]
        # search for arcs, trace if no scistd was there
        if config_specific_file is None:
            for idx, row in enumerate(usrdata):
                if ('arc' in row['frametype']) or ('trace'
                                                   in row['frametype']):
                    config_specific_file = data_files[idx]
        if config_specific_file is not None:
            msgs.info(
                'Setting configuration-specific parameters using {0}'.format(
                    os.path.split(config_specific_file)[1]))
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(
            config_specific_file).to_config()

        #   - Build the full set, merging with any user-provided
        #     parameters
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)
        msgs.info('Built full PypeIt parameter set.')

        # Check the output paths are ready
        if redux_path is not None:
            self.par['rdx']['redux_path'] = redux_path

        # TODO: Write the full parameter set here?
        # --------------------------------------------------------------

        # --------------------------------------------------------------
        # Build the meta data
        #   - Re-initialize based on the file data
        msgs.info('Compiling metadata')
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      files=data_files,
                                      usrdata=usrdata,
                                      strict=True)
        #   - Interpret automated or user-provided data from the PypeIt
        #   file
        self.fitstbl.finalize_usr_build(frametype, setups[0])
        # --------------------------------------------------------------
        #   - Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite

        # Currently the runtime argument determines the behavior for
        # reuse_masters.
        self.reuse_masters = reuse_masters
        self.show = show

        # Set paths
        self.calibrations_path = os.path.join(
            self.par['rdx']['redux_path'],
            self.par['calibrations']['master_dir'])

        # Check for calibrations
        if not self.calib_only:
            calibrations.check_for_calibs(
                self.par,
                self.fitstbl,
                raise_error=self.par['calibrations']['raise_chk_error'])

        # Report paths
        msgs.info('Setting reduction path to {0}'.format(
            self.par['rdx']['redux_path']))
        msgs.info('Master calibration data output to: {0}'.format(
            self.calibrations_path))
        msgs.info('Science data output to: {0}'.format(self.science_path))
        msgs.info('Quality assessment plots output to: {0}'.format(
            self.qa_path))
        # TODO: Is anything written to the qa dir or only to qa/PNGs?
        # Should we have separate calibration and science QA
        # directories?
        # An html file wrapping them all too

        #        # Instantiate Calibrations class
        #        if self.spectrograph.pypeline in ['MultiSlit', 'Echelle']:
        #            self.caliBrate \
        #                = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'],
        #                                                     self.spectrograph, self.calibrations_path,
        #                                                     qadir=self.qa_path,
        #                                                     reuse_masters=self.reuse_masters,
        #                                                     show=self.show,
        #                                                     slitspat_num=self.par['rdx']['slitspatnum'])
        #        elif self.spectrograph.pypeline in ['IFU']:
        #            self.caliBrate \
        #                = calibrations.IFUCalibrations(self.fitstbl, self.par['calibrations'],
        #                                               self.spectrograph,
        #                                               self.calibrations_path,
        #                                               qadir=self.qa_path,
        #                                               reuse_masters=self.reuse_masters,
        #                                               show=self.show)
        #        else:
        #            msgs.error("No calibration available to support pypeline: {0:s}".format(self.spectrograph.pypeline))

        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None

    @property
    def science_path(self):
        """Return the path to the science directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['scidir'])

    @property
    def qa_path(self):
        """Return the path to the top-level QA directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['qadir'])

    def build_qa(self):
        """
        Generate QA wrappers
        """
        qa.gen_mf_html(self.pypeit_file, self.qa_path)
        qa.gen_exp_html()

    # TODO: This should go in a more relevant place
    def spec_output_file(self, frame, twod=False):
        """
        Return the path to the spectral output data file.
        
        Args:
            frame (:obj:`int`):
                Frame index from :attr:`fitstbl`.
            twod (:obj:`bool`):
                Name for the 2D output file; 1D file otherwise.
        
        Returns:
            :obj:`str`: The path for the output file
        """
        return os.path.join(
            self.science_path,
            'spec{0}d_{1}.fits'.format('2' if twod else '1',
                                       self.fitstbl.construct_basename(frame)))

    def outfile_exists(self, frame):
        """
        Check whether the 2D outfile of a given frame already exists

        Args:
            frame (int): Frame index from fitstbl

        Returns:
            bool: True if the 2d file exists, False if it does not exist
        """
        return os.path.isfile(self.spec_output_file(frame, twod=True))

    def get_std_outfile(self, standard_frames):
        """
        Grab the output filename from an input list of standard_frame indices

        If more than one index is provided, the first is taken

        Args:
            standard_frames (list): List of indices corresponding to standard stars

        Returns:
            str: Full path to the standard spec1d output file
        """
        # TODO: Need to decide how to associate standards with
        # science frames in the case where there is more than one
        # standard associated with a given science frame.  Below, I
        # just use the first standard

        std_outfile = None
        std_frame = None if len(standard_frames) == 0 else standard_frames[0]
        # Prepare to load up standard?
        if std_frame is not None:
            std_outfile = self.spec_output_file(std_frame) \
                            if isinstance(std_frame, (int,np.integer)) else None
        if std_outfile is not None and not os.path.isfile(std_outfile):
            msgs.error('Could not find standard file: {0}'.format(std_outfile))
        return std_outfile

    def calib_all(self):
        """
        Create calibrations for all setups

        This will not crash if not all of the standard set of files are not provided


        """

        self.tstart = time.time()

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)
            grp_frames = frame_indx[in_grp]

            # Find the detectors to reduce
            detectors = PypeIt.select_detectors(
                detnum=self.par['rdx']['detnum'], ndet=self.spectrograph.ndet)
            # Loop on Detectors
            for self.det in detectors:
                # Instantiate Calibrations class
                self.caliBrate = calibrations.Calibrations.get_instance(
                    self.fitstbl,
                    self.par['calibrations'],
                    self.spectrograph,
                    self.calibrations_path,
                    qadir=self.qa_path,
                    reuse_masters=self.reuse_masters,
                    show=self.show,
                    slitspat_num=self.par['rdx']['slitspatnum'])
                # Do it
                self.caliBrate.set_config(grp_frames[0], self.det,
                                          self.par['calibrations'])
                self.caliBrate.run_the_steps()

        # Finish
        self.print_end_time()

    def reduce_all(self):
        """
        Main driver of the entire reduction

        Calibration and extraction via a series of calls to reduce_exposure()

        """
        # Validate the parameter set
        self.par.validate_keys(required=[
            'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure'
        ])
        self.tstart = time.time()

        # Find the standard frames
        is_standard = self.fitstbl.find_frames('standard')

        # Find the science frames
        is_science = self.fitstbl.find_frames('science')

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))

        # Iterate over each calibration group and reduce the standards
        for i in range(self.fitstbl.n_calib_groups):

            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the standard frames in this calibration group:
            grp_standards = frame_indx[is_standard & in_grp]

            # Reduce all the standard frames, loop on unique comb_id
            u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards])
            for j, comb_id in enumerate(u_combid_std):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    std_spec2d, std_sobjs = self.reduce_exposure(
                        frames, bg_frames=bg_frames)
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], std_spec2d, std_sobjs,
                                       self.basename)
                else:
                    msgs.info(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

        # Iterate over each calibration group again and reduce the science frames
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the science frames in this calibration group:
            grp_science = frame_indx[is_science & in_grp]
            # Associate standards (previously reduced above) for this setup
            std_outfile = self.get_std_outfile(frame_indx[is_standard])
            # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration
            science_basename = [None] * len(grp_science)
            # Loop on unique comb_id
            u_combid = np.unique(self.fitstbl['comb_id'][grp_science])
            for j, comb_id in enumerate(u_combid):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                # Find all frames whose comb_id matches the current frames bkg_id.
                bg_frames = np.where((self.fitstbl['comb_id'] ==
                                      self.fitstbl['bkg_id'][frames][0])
                                     & (self.fitstbl['comb_id'] >= 0))[0]
                # JFH changed the syntax below to that above, which allows frames to be used more than once
                # as a background image. The syntax below would require that we could somehow list multiple
                # numbers for the bkg_id which is impossible without a comma separated list
                #                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    # TODO -- Should we reset/regenerate self.slits.mask for a new exposure
                    sci_spec2d, sci_sobjs = self.reduce_exposure(
                        frames, bg_frames=bg_frames, std_outfile=std_outfile)
                    science_basename[j] = self.basename
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], sci_spec2d, sci_sobjs,
                                       self.basename)
                else:
                    msgs.warn(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

            msgs.info('Finished calibration group {0}'.format(i))

        # Check if this is an IFU reduction. If so, make a datacube
        if self.spectrograph.pypeline == "IFU" and self.par['reduce']['cube'][
                'make_cube']:
            msgs.work("Generate datacube")

        # Finish
        self.print_end_time()

    # This is a static method to allow for use in coadding script
    @staticmethod
    def select_detectors(detnum=None, ndet=1, slitspatnum=None):
        """
        Return the 1-indexed list of detectors to reduce.

        Args:
            detnum (:obj:`int`, :obj:`list`, optional):
                One or more detectors to reduce.  If None, return the
                full list for the provided number of detectors (`ndet`).
            ndet (:obj:`int`, optional):
                The number of detectors for this instrument.  Only used
                if `detnum is None`.
            slitspatnum (:obj:`str`, optional):
                Restrict the reduction to the detector(s) containing the
                specified slit(s); mutually exclusive with `detnum`.

        Returns:
            list:  List of detectors to be reduced

        """
        if detnum is not None and slitspatnum is not None:
            msgs.error(
                "You cannot specify both detnum and slitspatnum.  Too painful for over-writing SpecObjs"
            )
        if detnum is None and slitspatnum is None:
            return np.arange(1, ndet + 1).tolist()
        elif detnum is not None:
            return np.atleast_1d(detnum).tolist()
        else:
            return slittrace.parse_slitspatnum(slitspatnum)[0].tolist()

    def reduce_exposure(self, frames, bg_frames=None, std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the frames
                to reduce; they are combined if more than one is provided.
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background.
            std_outfile (:obj:`str`, optional):
                File with a previously reduced standard spectrum from
                PypeIt.

        Returns:
            dict: The dictionary containing the primary outputs of
            extraction.

        """

        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            # TODO: Put this in a try/except block?
            display.clear_all()

        has_bg = True if bg_frames is not None and len(
            bg_frames) > 0 else False

        # Is this an IR reduction?
        # TODO: Why specific to IR?
        self.ir_redux = True if has_bg else False

        # Container for all the Spec2DObj
        all_spec2d = spec2dobj.AllSpec2DObj()
        all_spec2d['meta']['ir_redux'] = self.ir_redux

        # TODO -- Should we reset/regenerate self.slits.mask for a new exposure

        all_specobjs = specobjs.SpecObjs()

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(
            self.fitstbl['target'][frames[0]]) + msgs.newline()
        # TODO: Print these when the frames are actually combined,
        # backgrounds are used, etc?
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in frames:
            msgs_string += '{0:s}'.format(
                self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if has_bg:
            bg_msgs_string = ''
            for iframe in bg_frames:
                bg_msgs_string += '{0:s}'.format(
                    self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline(
            ) + 'Using background from frames:' + msgs.newline(
            ) + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        detectors = PypeIt.select_detectors(
            detnum=self.par['rdx']['detnum'],
            slitspatnum=self.par['rdx']['slitspatnum'],
            ndet=self.spectrograph.ndet)
        if len(detectors) != self.spectrograph.ndet:
            msgs.warn('Not reducing detectors: {0}'.format(' '.join([
                str(d) for d in set(np.arange(self.spectrograph.ndet)) -
                set(detectors)
            ])))

        # Loop on Detectors
        # TODO: Attempt to put in a multiprocessing call here?
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            # Instantiate Calibrations class
            self.caliBrate = calibrations.Calibrations.get_instance(
                self.fitstbl,
                self.par['calibrations'],
                self.spectrograph,
                self.calibrations_path,
                qadir=self.qa_path,
                reuse_masters=self.reuse_masters,
                show=self.show,
                slitspat_num=self.par['rdx']['slitspatnum'])
            # These need to be separate to accommodate COADD2D
            self.caliBrate.set_config(frames[0], self.det,
                                      self.par['calibrations'])
            self.caliBrate.run_the_steps()
            # Extract
            # TODO: pass back the background frame, pass in background
            # files as an argument. extract one takes a file list as an
            # argument and instantiates science within
            all_spec2d[self.det], tmp_sobjs \
                    = self.reduce_one(frames, self.det, bg_frames, std_outfile=std_outfile)
            # Hold em
            if tmp_sobjs.nobj > 0:
                all_specobjs.add_sobj(tmp_sobjs)
            # JFH TODO write out the background frame?

            # TODO -- Save here?  Seems like we should.  Would probably need to use update_det=True

        # Return
        return all_spec2d, all_specobjs

    def get_sci_metadata(self, frame, det):
        """
        Grab the meta data for a given science frame and specific detector

        Args:
            frame (int): Frame index
            det (int): Detector index

        Returns:
            5 objects are returned::
                - str: Object type;  science or standard
                - str: Setup string from master_key()
                - astropy.time.Time: Time of observation
                - str: Basename of the frame
                - str: Binning of the detector

        """

        # Set binning, obstime, basename, and objtype
        binning = self.fitstbl['binning'][frame]
        obstime = self.fitstbl.construct_obstime(frame)
        basename = self.fitstbl.construct_basename(frame, obstime=obstime)
        objtype = self.fitstbl['frametype'][frame]
        if 'science' in objtype:
            objtype_out = 'science'
        elif 'standard' in objtype:
            objtype_out = 'standard'
        else:
            msgs.error('Unrecognized objtype')
        setup = self.fitstbl.master_key(frame, det=det)
        return objtype_out, setup, obstime, basename, binning

    def get_std_trace(self, std_redux, det, std_outfile):
        """
        Returns the trace of the standard if it is applicable to the current reduction

        Args:
            std_redux (bool): If False, proceed
            det (int): Detector index
            std_outfile (str): Filename for the standard star spec1d file

        Returns:
            ndarray or None: Trace of the standard star on input detector

        """
        if std_redux is False and std_outfile is not None:
            sobjs = specobjs.SpecObjs.from_fitsfile(std_outfile)
            # Does the detector match?
            # TODO Instrument specific logic here could be implemented with the parset. For example LRIS-B or LRIS-R we
            # we would use the standard from another detector
            this_det = sobjs.DET == det
            if np.any(this_det):
                sobjs_det = sobjs[this_det]
                sobjs_std = sobjs_det.get_std()
                # No standard extracted on this detector??
                if sobjs_std is None:
                    return None
                std_trace = sobjs_std.TRACE_SPAT
                # flatten the array if this is multislit
                if 'MultiSlit' in self.spectrograph.pypeline:
                    std_trace = std_trace.flatten()
                elif 'Echelle' in self.spectrograph.pypeline:
                    std_trace = std_trace.T
                else:
                    msgs.error('Unrecognized pypeline')
            else:
                std_trace = None
        else:
            std_trace = None

        return std_trace

    def reduce_one(self, frames, det, bg_frames, std_outfile=None):
        """
        Reduce + Extract a single exposure/detector pair

        sci_ID and det need to have been set internally prior to calling this method

        Args:
            frames (:obj:`list`):
                List of frames to extract; stacked if more than one
                is provided
            det (:obj:`int`):
                Detector number (1-indexed)
            bg_frames (:obj:`list`):
                List of frames to use as the background. Can be
                empty.
            std_outfile (:obj:`str`, optional):
                Filename for the standard star spec1d file. Passed
                directly to :func:`get_std_trace`.

        Returns:
            tuple: Returns a :class:`pypeit.spec2dobj.Spec2DObj` with the
            2D outputs of the reduction and a
            :class:`pypeit.specobjs.SpecObjs` object with the extracted
            spectra from this exposure/detector pair.

        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning \
                = self.get_sci_metadata(frames[0], det)
        msgs.info("Extraction begins for {} on det={}".format(
            self.basename, det))
        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype
        if self.std_redux:
            frame_par = self.par['calibrations']['standardframe']
        else:
            frame_par = self.par['scienceframe']
        # Get the standard trace if need be
        std_trace = self.get_std_trace(self.std_redux, det, std_outfile)

        # Build Science image
        sci_files = self.fitstbl.frame_paths(frames)
        sciImg = buildimage.buildimage_fromlist(
            self.spectrograph,
            det,
            frame_par,
            sci_files,
            bias=self.caliBrate.msbias,
            bpm=self.caliBrate.msbpm,
            dark=self.caliBrate.msdark,
            flatimages=self.caliBrate.flatimages,
            slits=self.caliBrate.slits,  # For flexure correction
            ignore_saturation=False)

        # Background Image?
        if len(bg_frames) > 0:
            bg_file_list = self.fitstbl.frame_paths(bg_frames)
            sciImg = sciImg.sub(
                buildimage.buildimage_fromlist(
                    self.spectrograph,
                    det,
                    frame_par,
                    bg_file_list,
                    bpm=self.caliBrate.msbpm,
                    bias=self.caliBrate.msbias,
                    dark=self.caliBrate.msdark,
                    flatimages=self.caliBrate.flatimages,
                    slits=self.caliBrate.slits,  # For flexure correction
                    ignore_saturation=False),
                frame_par['process'])

        # Instantiate Reduce object
        # Required for pypeline specific object
        # At instantiation, the fullmask in self.sciImg is modified
        self.redux = reduce.Reduce.get_instance(sciImg,
                                                self.spectrograph,
                                                self.par,
                                                self.caliBrate,
                                                self.objtype,
                                                ir_redux=self.ir_redux,
                                                std_redux=self.std_redux,
                                                setup=self.setup,
                                                show=self.show,
                                                det=det,
                                                binning=self.binning,
                                                std_outfile=std_outfile,
                                                basename=self.basename)
        # Show?
        if self.show:
            self.redux.show('image',
                            image=sciImg.image,
                            chname='processed',
                            slits=True,
                            clear=True)

        # Do it
        skymodel, objmodel, ivarmodel, outmask, sobjs, scaleImg, waveImg, tilts = self.redux.run(
            std_trace=std_trace,
            show_peaks=self.show,
            ra=self.fitstbl["ra"][frames[0]],
            dec=self.fitstbl["dec"][frames[0]],
            obstime=self.obstime)

        # TODO -- Save the slits yet again?

        # TODO -- Do this upstream
        # Tack on detector
        for sobj in sobjs:
            sobj.DETECTOR = sciImg.detector

        # Construct the Spec2DObj
        spec2DObj = spec2dobj.Spec2DObj(det=self.det,
                                        sciimg=sciImg.image,
                                        ivarraw=sciImg.ivar,
                                        skymodel=skymodel,
                                        objmodel=objmodel,
                                        ivarmodel=ivarmodel,
                                        scaleimg=scaleImg,
                                        waveimg=waveImg,
                                        bpmmask=outmask,
                                        detector=sciImg.detector,
                                        sci_spat_flexure=sciImg.spat_flexure,
                                        tilts=tilts,
                                        slits=copy.deepcopy(
                                            self.caliBrate.slits))
        spec2DObj.process_steps = sciImg.process_steps

        # Return
        return spec2DObj, sobjs

    def save_exposure(self, frame, all_spec2d, all_specobjs, basename):
        """
        Save the outputs from extraction for a given exposure

        Args:
            frame (:obj:`int`):
                0-indexed row in the metadata table with the frame
                that has been reduced.
            all_spec2d (:class:`pypeit.spec2dobj.AllSpec2DObj`):
                Container with the 2D outputs of extraction.
            all_specobjs (:class:`pypeit.specobjs.SpecObjs`):
                Container with the extracted 1D spectra.
            basename (:obj:`str`):
                The root name for the output file.

        """
        # TODO: Need some checks here that the exposure has been reduced?

        # Determine the headers
        row_fitstbl = self.fitstbl[frame]
        # Need raw file header information
        rawfile = self.fitstbl.frame_paths(frame)
        head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext)

        # Check for the directory
        if not os.path.isdir(self.science_path):
            os.makedirs(self.science_path)

        subheader = self.spectrograph.subheader_for_spec(row_fitstbl, head2d)
        # 1D spectra
        if all_specobjs.nobj > 0:
            # Spectra
            outfile1d = os.path.join(self.science_path,
                                     'spec1d_{:s}.fits'.format(basename))
            all_specobjs.write_to_fits(
                subheader,
                outfile1d,
                update_det=self.par['rdx']['detnum'],
                slitspatnum=self.par['rdx']['slitspatnum'])
            # Info
            outfiletxt = os.path.join(self.science_path,
                                      'spec1d_{:s}.txt'.format(basename))
            all_specobjs.write_info(outfiletxt, self.spectrograph.pypeline)

        # 2D spectra
        outfile2d = os.path.join(self.science_path,
                                 'spec2d_{:s}.fits'.format(basename))
        # Build header
        pri_hdr = all_spec2d.build_primary_hdr(
            head2d,
            self.spectrograph,
            redux_path=self.par['rdx']['redux_path'],
            master_key_dict=self.caliBrate.master_key_dict,
            master_dir=self.caliBrate.master_dir,
            subheader=subheader)
        # Write
        all_spec2d.write_to_fits(outfile2d,
                                 pri_hdr=pri_hdr,
                                 update_det=self.par['rdx']['detnum'])

    def msgs_reset(self):
        """
        Reset the msgs object
        """

        # Reset the global logger
        msgs.reset(log=self.logname, verbosity=self.verbosity)
        msgs.pypeit_file = self.pypeit_file

    def print_end_time(self):
        """
        Print the elapsed time
        """
        # Capture the end time and print it to user
        tend = time.time()
        codetime = tend - self.tstart
        if codetime < 60.0:
            msgs.info('Execution time: {0:.2f}s'.format(codetime))
        elif codetime / 60.0 < 60.0:
            mns = int(codetime / 60.0)
            scs = codetime - 60.0 * mns
            msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs))
        else:
            hrs = int(codetime / 3600.0)
            mns = int(60.0 * (codetime / 3600.0 - hrs))
            scs = codetime - 60.0 * mns - 3600.0 * hrs
            msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format(
                hrs, mns, scs))

    # TODO: Move this to fitstbl?
    def show_science(self):
        """
        Simple print of science frames
        """
        indx = self.fitstbl.find_frames('science')
        print(self.fitstbl[['target', 'ra', 'dec', 'exptime',
                            'dispname']][indx])

    def __repr__(self):
        # Generate sets string
        return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__,
                                               self.pypeit_file)
Example 16
    def __init__(self,
                 pypeit_file,
                 verbosity=2,
                 overwrite=True,
                 reuse_masters=False,
                 logname=None,
                 show=False,
                 redux_path=None,
                 calib_only=False):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups \
                = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file
        self.calib_only = calib_only

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)
        msgs.info('Loaded spectrograph {0}'.format(
            self.spectrograph.spectrograph))

        # --------------------------------------------------------------
        # Get the full set of PypeIt parameters
        #   - Grab a science or standard file for configuration specific parameters

        config_specific_file = None
        for idx, row in enumerate(usrdata):
            if ('science' in row['frametype']) or ('standard'
                                                   in row['frametype']):
                config_specific_file = data_files[idx]
        # search for arcs, trace if no scistd was there
        if config_specific_file is None:
            for idx, row in enumerate(usrdata):
                if ('arc' in row['frametype']) or ('trace'
                                                   in row['frametype']):
                    config_specific_file = data_files[idx]
        if config_specific_file is not None:
            msgs.info(
                'Setting configuration-specific parameters using {0}'.format(
                    os.path.split(config_specific_file)[1]))
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(
            config_specific_file).to_config()

        #   - Build the full set, merging with any user-provided
        #     parameters
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)
        msgs.info('Built full PypeIt parameter set.')

        # Check the output paths are ready
        if redux_path is not None:
            self.par['rdx']['redux_path'] = redux_path

        # TODO: Write the full parameter set here?
        # --------------------------------------------------------------

        # --------------------------------------------------------------
        # Build the meta data
        #   - Re-initialize based on the file data
        msgs.info('Compiling metadata')
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      files=data_files,
                                      usrdata=usrdata,
                                      strict=True)
        #   - Interpret automated or user-provided data from the PypeIt
        #   file
        self.fitstbl.finalize_usr_build(frametype, setups[0])
        # --------------------------------------------------------------
        #   - Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite

        # Currently the runtime argument determines the behavior for
        # reuse_masters.
        self.reuse_masters = reuse_masters
        self.show = show

        # Set paths
        self.calibrations_path = os.path.join(
            self.par['rdx']['redux_path'],
            self.par['calibrations']['master_dir'])

        # Check for calibrations
        if not self.calib_only:
            calibrations.check_for_calibs(
                self.par,
                self.fitstbl,
                raise_error=self.par['calibrations']['raise_chk_error'])

        # Report paths
        msgs.info('Setting reduction path to {0}'.format(
            self.par['rdx']['redux_path']))
        msgs.info('Master calibration data output to: {0}'.format(
            self.calibrations_path))
        msgs.info('Science data output to: {0}'.format(self.science_path))
        msgs.info('Quality assessment plots output to: {0}'.format(
            self.qa_path))
        # TODO: Is anything written to the qa dir or only to qa/PNGs?
        # Should we have separate calibration and science QA
        # directories?
        # An html file wrapping them all too

        #        # Instantiate Calibrations class
        #        if self.spectrograph.pypeline in ['MultiSlit', 'Echelle']:
        #            self.caliBrate \
        #                = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'],
        #                                                     self.spectrograph, self.calibrations_path,
        #                                                     qadir=self.qa_path,
        #                                                     reuse_masters=self.reuse_masters,
        #                                                     show=self.show,
        #                                                     slitspat_num=self.par['rdx']['slitspatnum'])
        #        elif self.spectrograph.pypeline in ['IFU']:
        #            self.caliBrate \
        #                = calibrations.IFUCalibrations(self.fitstbl, self.par['calibrations'],
        #                                               self.spectrograph,
        #                                               self.calibrations_path,
        #                                               qadir=self.qa_path,
        #                                               reuse_masters=self.reuse_masters,
        #                                               show=self.show)
        #        else:
        #            msgs.error("No calibration available to support pypeline: {0:s}".format(self.spectrograph.pypeline))

        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None
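# A minimal usage sketch for the class whose constructor is shown above
# ('my_reduction.pypeit' is a hypothetical file name; assumes the pypeit
# package is installed and the file exists):
#
#     from pypeit.pypeit import PypeIt
#     pypeit = PypeIt('my_reduction.pypeit', verbosity=2, reuse_masters=True)
#     # the reduction driver itself (e.g. reduce_all) is not shown in this excerpt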
Ejemplo n.º 17
0
class PypeItSetup(object):
    """
    Prepare for a pypeit run.

    .. todo::
        - This is now mostly a wrapper for PypeItMetaData.  Should we
          remove this class, or merge PypeItSetup and PypeItMetaData?

    The main deliverables are the set of parameters used for pypeit's
    algorithms (:attr:`par`), an :obj:`astropy.table.Table` with the
    details of the files to be reduced (:attr:`fitstbl`), and a
    dictionary with the list of instrument setups.

    Args:
        file_list (list):
            A list of strings with the full path to each file to be
            reduced.
        frametype (:obj:`dict`, optional):
            A dictionary that associates the name of the file (just the
            fits file name without the full path) to a specific frame
            type (e.g., arc, bias, etc.).  If None, this is determined
            by the :func:`get_frame_types` method.
        usrdata (:obj:`astropy.table.Table`, optional):
            A user provided set of data used to supplement or overwrite
            metadata read from the file headers.  The table must have a
            `filename` column that is used to match to the metadata
            table generated within PypeIt.
        setups (:obj:`list`, optional):
            A list of setups that each file can be associated with.  If
            None, all files are expected to be for a single setup.
        cfg_lines (:obj:`list`, optional):
            A list of strings that provide a set of user-defined
            parameters for executing pypeit.  These are the lines of a
            configuration file.  See the documentation for the
            `configobj`_ package.  One of the user-level inputs should
            be the spectrograph that provided the data to be reduced.
            One can get the list of spectrographs currently served by
            running::

                from pypeit.spectrographs.util import valid_spectrographs
                print(valid_spectrographs())

            To use all the default parameters when reducing data from a
            given spectrograph, you can set `cfg_lines = None`, but you
            then *must* provide the `spectrograph_name`.
        spectrograph_name (:obj:`str`, optional):
            If not providing a list of configuration lines
            (`cfg_lines`), this sets the spectrograph.  The spectrograph
            defined in `cfg_lines` takes precedence over anything
            provided by this argument.
        pypeit_file (:obj:`str`, optional):
            The name of the pypeit file used to instantiate the
            reduction.  This can be None, and will lead to default names
            for output files (TODO: Give list).  Setting `pypeit_file`
            here *only sets the name of the file*.  To instantiate a
            `PypeItSetup` object directly from a pypeit file (i.e. by
            reading the file), use the :func:`from_pypeit_file` method;
            i.e.::

                setup = PypeItSetup.from_pypeit_file('myfile.pypeit')

    Attributes:
        file_list (list):
            See description of class argument.
        frametype (dict):
            See description of class argument.
        setups (list):
            See description of class argument.
        pypeit_file (str):
            See description of class argument.
        spectrograph (:class:`pypeit.spectrographs.spectrograph.Spectrograph`):
            An instance of the `Spectrograph` class used throughout the
            reduction procedures.
        par (:class:`pypeit.par.pypeitpar.PypeItPar`):
            An instance of the `PypeItPar` class that provides the
            parameters to all the algorithms that pypeit uses to reduce
            the data.
        fitstbl (:class:`pypeit.metadata.PypeItMetaData`):
            A `Table` that provides the salient metadata for the fits
            files to be reduced.
        setup_dict (dict):
            The dictionary with the list of instrument setups.
        steps (list):
            The steps run to provide the pypeit setup.

    .. _configobj: http://configobj.readthedocs.io/en/latest/
    """

    def __init__(self, file_list, path=None, frametype=None, usrdata=None, setups=None,
                 cfg_lines=None, spectrograph_name=None, pypeit_file=None):

        # The provided list of files cannot be None
        if file_list is None or len(file_list) == 0:
            msgs.error('Must provide a list of files to be reduced!')

        # Save input
        self.file_list = file_list
        self.path = os.getcwd() if path is None else path
        self.frametype = frametype
        self.usrdata = usrdata
        self.setups = setups
        self.pypeit_file = pypeit_file
        self.user_cfg = cfg_lines

        # Determine the spectrograph name
        _spectrograph_name = spectrograph_name if cfg_lines is None \
            else PypeItPar.from_cfg_lines(merge_with=cfg_lines)['rdx']['spectrograph']

        # Cannot proceed without spectrograph name
        if _spectrograph_name is None:
            msgs.error(
                'Must provide spectrograph name directly or using configuration lines.')

        # Instantiate the spectrograph
        self.spectrograph = load_spectrograph(_spectrograph_name)

        # Get the spectrograph specific configuration to be merged with
        # the user modifications.
        spectrograph_cfg_lines = self.spectrograph.default_pypeit_par().to_config()

        # Instantiate the pypeit parameters.  The user input
        # configuration (cfg_lines) can be None.
        self.par = PypeItPar.from_cfg_lines(
            cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

        # Prepare internals for execution
        self.fitstbl = None
        self.setup_dict = None
        self.steps = []

    @classmethod
    def from_pypeit_file(cls, filename):
        """
        Instantiate the :class:`PypeItSetup` object using a pypeit file.

        Args:
            filename (str):
                Name of the pypeit file to read.  PypeIt files have a
                specific set of valid formats.  A description can be
                found at :ref:`pypeit_file`.

        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(
            filename)
        return cls(data_files, frametype=frametype, usrdata=usrdata, setups=setups,
                   cfg_lines=cfg_lines, pypeit_file=filename)

    @classmethod
    def from_file_root(cls, root, spectrograph, extension='.fits', output_path=None):
        """
        Instantiate the :class:`PypeItSetup` object by providing a file
        root.

        This is based on first writing a vanilla PypeIt file for the
        provided spectrograph and extension to a file in the provided
        path.

        Args:
            root (:obj:`str`):
                The root path to all the files for PypeIt to reduce.
                This should be everything up to the wild-card before the
                file extension to use to find the relevant files.  The
                root itself can have wild cards to read through multiple
                directories.
            spectrograph (:obj:`str`):
                The PypeIt name of the spectrograph used to take the
                observations.  This should be one of the available
                options in
                :func:`pypeit.spectrographs.valid_spectrographs`.
            extension (:obj:`str`, optional):
                The extension common to all the fits files to reduce.
                Default is '.fits', meaning anything with `root*.fits*`
                will be included.
            output_path (:obj:`str`, optional):
                Path to use for the output.  If None, the default is
                './setup_files'.  If the path doesn't yet exist, it is
                created.

        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        # Set the output directory
        outdir = os.path.join(
            os.getcwd(), 'setup_files') if output_path is None else output_path
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
        # Set the output file name
        date = str(datetime.date.today().strftime('%Y-%b-%d'))
        pypeit_file = os.path.join(
            outdir, '{0}_{1}.pypeit'.format(spectrograph, date))
        msgs.info(
            'A vanilla pypeit file will be written to: {0}'.format(pypeit_file))

        # Generate the pypeit file
        cls.vanilla_pypeit_file(
            pypeit_file, root, spectrograph, extension=extension)

        # Now setup PypeIt using that file
        return cls.from_pypeit_file(pypeit_file)
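    # A minimal sketch (hypothetical path): build the setup directly from a
    # raw-data root instead of a hand-written pypeit file, e.g.
    #     setup = PypeItSetup.from_file_root('/data/raw/', 'keck_lris_blue')
    # which writes a vanilla pypeit file to ./setup_files and then loads it.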

    @staticmethod
    def vanilla_pypeit_file(pypeit_file, root, spectrograph, extension='.fits'):
        """
        Write a vanilla PypeIt file.

        Args:
            pypeit_file (str):
              Name of the PypeIt file to be generated
            root (str):
              Root of the path to the raw data files; may include wildcards
            spectrograph (str):
              Name of the spectrograph
            extension (str, optional):
              File extension of the raw data files

        Returns:

        """
        # Generate
        dfname = os.path.join(root, '*{0}*'.format(extension)) \
            if os.path.isdir(root) else '{0}*{1}*'.format(root, extension)
        # configuration lines
        cfg_lines = ['[rdx]']
        cfg_lines += ['    spectrograph = {0}'.format(spectrograph)]
#        cfg_lines += ['    sortroot = {0}'.format(root)] # check if this needs to be uncommented
        make_pypeit_file(pypeit_file, spectrograph, [
                         dfname], cfg_lines=cfg_lines, setup_mode=True)
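    # For example (hypothetical paths): a directory root such as '/data/raw/'
    # with the default extension produces a data search string of
    # '/data/raw/*.fits*', while a file-prefix root such as '/data/raw/b'
    # produces '/data/raw/b*.fits*'.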

    @property
    def nfiles(self):
        """The number of files to reduce."""
        if self.fitstbl is None:
            msgs.warn('No fits files have been read!')
        return 0 if self.fitstbl is None else len(self.fitstbl)

    def __repr__(self):
        return '<{:s}: nfiles={:d}>'.format(self.__class__.__name__, self.nfiles)

    def build_fitstbl(self, strict=True):
        """
        Construct the table with metadata for the frames to reduce.

        Largely a wrapper for :func:`pypeit.core.load.create_fitstbl`.

        Args:
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read the headers of any of the files in
                :attr:`file_list`.  Set to False to only report a
                warning and continue.

        Returns:
            :obj:`astropy.table.Table`: Table with the metadata for each
            fits file to reduce.  Note this is different from
            :attr:`fitstbl` which is a :obj:`PypeItMetaData` object
        """
        # Build and sort the table
        self.fitstbl = PypeItMetaData(self.spectrograph, par=self.par, files=self.file_list,
                                      usrdata=self.usrdata, strict=strict)
        # Sort by the time
        if 'time' in self.fitstbl.keys():
            self.fitstbl.sort('time')

        # Add this to the completed steps
        self.steps.append(inspect.stack()[0][3])

        # Return the table
        return self.fitstbl.table

    def get_frame_types(self, flag_unknown=False, use_header_id=False):
        """
        Include the frame types in the metadata table.

        This is mainly a wrapper for
        :func:`PypeItMetaData.get_frame_types`.

        .. warning::

            Because this merges the frame types with the existing
            :attr:`fitstbl` this should only be run once.

        Args:
            flag_unknown (:obj:`bool`, optional):
                Allow for frames to have unknown types instead of
                crashing.  This should be True for initial setup and
                False otherwise.  Passed to get_frame_types()
            use_header_id (bool, optional):
                Passed to get_frame_types()

        """
        # Use PypeItMetaData methods to get the frame types
        _ = self.fitstbl.get_frame_types(flag_unknown=flag_unknown, user=self.frametype,
                                         useIDname=use_header_id)
        # Include finished processing step
        self.steps.append(inspect.stack()[0][3])

    def load_metadata(self, fits_file):
        """
        Load the fitstbl from disk (a binary FITS table)

        Args:
            fits_file (str):  Name of PypeItMetaData file

        Returns:
            :obj:`PypeItMetaData`: The so-called fitstbl

        """
        self.fitstbl = PypeItMetaData(
            self.spectrograph, self.par, data=Table.read(fits_file))
        msgs.info("Loaded fitstbl from {:s}".format(fits_file))
        return self.fitstbl.table

    def write_metadata(self, sort_dir=None, ofile=None):
        """
        Write the :class:`astropy.table.Table` object in :attr:`fitstbl`
        to a file.

        If an output file is provided, the file is used.  If that file
        name includes '.fits', the output will be a fits file; otherwise
        the format is ascii.fixed_width.

        If no output file, the default is an ascii table with an '.lst'
        extension.  The root name of the file is either the spectrograph
        name or the root name of the pypeit file, if the latter exists.

        If a `sort_dir` is provided, the directory of the nominal output
        file is changed to this directory.

        Args:
            sort_dir (:obj:`str`, optional):
                The full root of the name for the metadata table
                ('.lst') file.
            ofile (:obj:`str`, optional):
                The name of the file to write.  See description above.
        """
        if ofile is None:
            ofile = self.spectrograph.spectrograph + '.lst' if self.pypeit_file is None \
                else self.pypeit_file.replace('.pypeit', '.lst')
            if sort_dir is not None:
                ofile = os.path.join(sort_dir, os.path.split(ofile)[1])

        format = None if '.fits' in ofile else 'ascii.fixed_width'
        self.fitstbl.write(ofile,
                           #columns=None if format is None else self.spectrograph.pypeit_file_keys(),
                           format=format, overwrite=True)

    def run(self, setup_only=False, calibration_check=False,
            use_header_id=False, sort_dir=None, write_bkg_pairs=False):
        """
        Once instantiated, this is the main method used to construct the
        object.

        The code flow is as follows::
            - Build the fitstbl from an input file_list (optional)
            - Type the files (bias, arc, etc.)
            - Match calibration files to the science files
            - Generate the setup_dict
                - Write group info to disk
                - Write calib info to disk (if main run)

        It is expected that a user will run this three times if they're
        being careful.  Once with `setup_only=True` to confirm the
        images are properly typed and grouped together for calibration.
        A second time with `calibration_check=True` to confirm the
        appropriate calibration frames are available.  And a third time
        to do the actual setup before proceeding with the reductions.

        Args:
            setup_only (:obj:`bool`, optional):
                Only this setup will be performed.  PypeIt is expected to
                execute in a way that ends after this class is fully
                instantiated such that the user can inspect the results
                before proceeding.  This has the effect of providing
                more output describing the success of the setup and how
                to proceed, and provides warnings (instead of errors)
                for issues that may cause the reduction itself to fail.
            calibration_check (:obj:`bool`, optional):
                Only check that the calibration frames are appropriately
                setup and exist on disk.  PypeIt is expected to execute
                in a way that ends after this class is fully
                instantiated such that the user can inspect the results
                before proceeding. 
            use_header_id (:obj:`bool`, optional):
                Allow setup to use the frame types drawn from single,
                instrument-specific header keywords set to `idname` in
                the metadata table (:attr:`fitstbl`).
            sort_dir (:obj:`str`, optional):
                The directory to put the '.sorted' file.

        Returns:
            :class:`pypeit.par.pypeitpar.PypeItPar`,
            :class:`pypeit.spectrographs.spectrograph.Spectrograph`,
            :class:`astropy.table.Table`: Returns the attributes
            :attr:`par`, :attr:`spectrograph`, and :attr:`fitstbl`.
            If running with `setup_only`, these are all returned as
            `None` values.
        """
        # Kludge
        pypeit_file = '' if self.pypeit_file is None else self.pypeit_file

        # Build fitstbl
        if self.fitstbl is None:
            self.build_fitstbl(strict=not setup_only)  # , bkg_pairs=bkg_pairs)

        # File typing
        self.get_frame_types(flag_unknown=setup_only or calibration_check,
                             use_header_id=use_header_id)

        # TODO: Decide where the SPIT predictions should be stored and whether
        # the path to the SPIT model should be a parameter of the prediction
        # method; the frame types are not currently printed.

        # spit classification
        spit_frame_types, spit_framebits = self.spit_classify()

        # compare spit to rules-based
        for index, framebit in enumerate(self.fitstbl['framebit']):
            # check if the SPIT and rules-based framebits differ
            if spit_framebits[index] != framebit:
                # if so, warn the user (reporting the offending file) and
                # adopt SPIT's values
                msgs.warn("SPIT and rules-based framebits differ at %s" %
                          (self.fitstbl['filename'][index]))
                self.fitstbl['framebit'][index] = spit_framebits[index]
                self.fitstbl['frametype'][index] = spit_frame_types[index]

        # Determine the configurations and assign each frame to the
        # specified configuration
        ignore_frames = ['bias', 'dark']
        cfgs = self.fitstbl.unique_configurations(ignore_frames=ignore_frames)
        self.fitstbl.set_configurations(cfgs, ignore_frames=ignore_frames)

        # Assign frames to calibration groups
        self.fitstbl.set_calibration_groups(global_frames=['bias', 'dark'])

        # Set default comb_id
        self.fitstbl.set_combination_groups()

        # Assign science IDs based on the calibrations groups (to be
        # deprecated)
        self.fitstbl['failures'] = False                    # TODO: placeholder

        if setup_only:
            # Collate all matching files and write .sorted Table (on pypeit_setup only)
            sorted_file = self.spectrograph.spectrograph + '.sorted' \
                if pypeit_file is None or len(pypeit_file) == 0 \
                else pypeit_file.replace('.pypeit', '.sorted')
            if sort_dir is not None:
                sorted_file = os.path.join(
                    sort_dir, os.path.split(sorted_file)[1])
            self.fitstbl.write_sorted(
                sorted_file, write_bkg_pairs=write_bkg_pairs)
            msgs.info("Wrote sorted file data to {:s}".format(sorted_file))

        else:
            # Write the calib file
            calib_file = self.spectrograph.spectrograph + '.calib' \
                if pypeit_file is None or len(pypeit_file) == 0 \
                else pypeit_file.replace('.pypeit', '.calib')
            if sort_dir is not None:
                calib_file = os.path.join(
                    sort_dir, os.path.split(calib_file)[1])
            self.fitstbl.write_calib(calib_file)

        # Finish (depends on PypeIt run mode)
        # TODO: Do we need this functionality?
        if calibration_check:
            msgs.info("Inspect the .calib file: {:s}".format(calib_file))
            msgs.info("*********************************************************")
            msgs.info("Calibration check complete and successful!")
#            msgs.info("Set 'run calcheck False' to continue with data reduction")
            msgs.info("*********************************************************")
            # TODO: Why should this not return the same as when setup_only is True

        if setup_only:
            for idx in np.where(self.fitstbl['failures'])[0]:
                msgs.warn("No Arc found: Skipping object {:s} with file {:s}".format(
                    self.fitstbl['target'][idx], self.fitstbl['filename'][idx]))
            msgs.info("Setup is complete.")
            msgs.info("Inspect the .sorted file")
            return None, None, None

        return self.par, self.spectrograph, self.fitstbl

    def spit_classify(self, spit_path=os.getenv('SPIT_MODEL_PATH')):
        """
        Get frame types of all SPIT predictions.

        Parameters:
            spit_path:
                Path to the directory containing the best SPIT model.
                Defaults to the SPIT_MODEL_PATH environment variable.
                ***User must have the model downloaded and must set this path manually.***
        Returns:
            spit_frame_types:
                1-d list containing string frame type for each file
            spit_framebits:
                1-d list containing frame bit integers for each file
        """

        # Load the best model.  SPIT_MODEL_PATH must be set by the user
        # until best_model.h5 is distributed with the spit or pypeit repo.
        best_model = keras.models.load_model(os.path.join(spit_path, 'best_model.h5'))

        # initialize predictions list
        predictions = []

        # loop through all files in fitstbl['filename']
        for directory, file in zip(self.fitstbl['directory'], self.fitstbl['filename']):
            # get image data from fits file
            raw_frame, hdu = self.spectrograph.load_raw_frame(
                os.path.join(directory, file))

            # perform pre-processing on the image
            preproc_dict = spit_p.original_preproc_dict()
            img_np = spit_p.process_image(raw_frame, preproc_dict)
            
            # cast to 8-bit unsigned integers for the SPIT classifier
            img_np = np.uint8(img_np)
            # classify with spit
            pred_list, pred = self.predict_one_image(img_np, best_model)

            # add to list of predictions
            predictions.append(pred)

        # get frame types and framebits from spit predictions
        spit_frame_types, spit_framebits = self.predictions_to_framebits(
            predictions)

        # return predictions
        return spit_frame_types, spit_framebits

    def predict_one_image(self, image, model):
        """
        Predict one image's label with a given neural network.

        Parameters:
            image:
                2-d numpy array containing image data
            model: 
                keras model, likely densenet
        Returns:
            prediction[0]: 
                1-d array containing the probability of each label being the true label
            maxIndex[0][0]:
                integer index of the highest probability, i.e. the predicted label
        """
        # get shape of the image
        ishape = list(image.shape)

        # reshape to what is necessary for classifier
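        # e.g., an image of shape (350, 2112) becomes (1, 350, 2112, 1),
        # adding the batch and channel axes expected by the network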
        image = image.reshape([1]+ishape+[1])

        # get predictions for labels
        prediction = model.predict(image)

        # get the index of the maximum probability (the predicted label)
        maxIndex = np.where(prediction[0] == np.amax(prediction[0]))

        # return correct values within respective return values
        return prediction[0], maxIndex[0][0]

    def predictions_to_framebits(self, predictions):
        """
        Get framebits and frame types of all SPIT predictions.

        Parameters:
            predictions: 
                1-d list containing integers representing index of label
        Returns:
            spit_frame_types:
                1-d list containing string frame type for each file
            type_bits:
                1-d list containing frame bit integers for each file
        """
        # convert integer predictions to string frame-type labels using the
        # SPIT label dictionary
        spit_labels = self.get_spit_labels()
        spit_frame_types = self.get_spit_frame_types(spit_labels, predictions)

        # use some of the code from metadata.py lines 1267-1295
        type_bits = np.zeros(
            len(self.fitstbl), dtype=self.fitstbl.type_bitmask.minimum_dtype())

        # create dictionary to be used for setting framebit
        user = {}
        for index, file in enumerate(self.fitstbl['filename']):
            user[file] = spit_frame_types[index]

        # Use the user-defined frame types from the input dictionary; this
        # could eventually be integrated with related methods in metadata.py
        if len(user.keys()) != len(self.fitstbl):
            raise ValueError(
                'The user-provided dictionary does not match table length.')
        msgs.info('Converting SPIT frame types.')
        for ifile, ftypes in user.items():
            indx = self.fitstbl['filename'] == ifile
            # turn on relevant framebits
            type_bits[indx] = self.fitstbl.type_bitmask.turn_on(
                type_bits[indx], flag=ftypes.split(','))

        return spit_frame_types, type_bits

    def get_spit_labels(self):
        """
        Get SPIT labels in framematch format.

        Parameters:
            None
        Returns:
            classes:
                1-d list containing cleaned up SPIT labels for frame typing.
        """

        # initialize the label dict and the list of classes (could be optimized)
        label_dict = spit_l.kast_label_dict()
        classes = []

        for label in label_dict:
            if '_label' in label:
                label = label.replace('_label', '')
            classes.append(label)
        return classes

    def get_spit_frame_types(self, spit_labels, predictions):
        """
        Get frame types of all SPIT predictions.

        Parameters:
            spit_labels:
                Cleaned up labels from SPIT's label dict (strings)
            predictions: 
                1-d list containing integers representing index of label
        Returns:
            spit_frame_types:
                1-d list containing string frame type for each file
        """
        # initialize empty list
        spit_frame_types = []

        # get all frame types for each label
        for p in predictions:
            spit_frame_type = spit_labels[p]  # a string

            # map spit labels to proper frames for specific cases
            if 'flat' in spit_frame_type:
                spit_frame_type = spit_frame_type.replace('flat', 'pixelflat,trace')
            elif 'arc' in spit_frame_type:
                spit_frame_type += ',tilt'
            spit_frame_types.append(spit_frame_type)
        return spit_frame_types
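# A self-contained sketch of the label-to-frame-type mapping applied in
# get_spit_frame_types above (the label names below are illustrative; the
# real list comes from spit's kast_label_dict):
def _map_spit_label(label):
    """Map a SPIT label to a comma-separated PypeIt frame-type string."""
    if 'flat' in label:
        return label.replace('flat', 'pixelflat,trace')
    elif 'arc' in label:
        return label + ',tilt'
    return label

assert _map_spit_label('flat') == 'pixelflat,trace'
assert _map_spit_label('arc') == 'arc,tilt'
assert _map_spit_label('bias') == 'bias'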
Ejemplo n.º 18
0
class PypeItSetup:
    """
    Prepare for a pypeit run.

    .. todo::
        - This is now mostly a wrapper for PypeItMetaData.  Should we
          remove this class, or merge PypeItSetup and PypeItMetaData?

    The main deliverables are the set of parameters used for pypeit's
    algorithms (:attr:`par`), an :obj:`astropy.table.Table` with the
    details of the files to be reduced (:attr:`fitstbl`), and a
    dictionary with the list of instrument setups.

    Args:
        file_list (list):
            A list of strings with the full path to each file to be
            reduced.
        frametype (:obj:`dict`, optional):
            A dictionary that associates the name of the file (just the
            fits file name without the full path) to a specific frame
            type (e.g., arc, bias, etc.).  If None, this is determined
            by the :func:`get_frame_types` method.
        usrdata (:obj:`astropy.table.Table`, optional):
            A user provided set of data used to supplement or overwrite
            metadata read from the file headers.  The table must have a
            `filename` column that is used to match to the metadata
            table generated within PypeIt.
        setups (:obj:`list`, optional):
            A list of setups that each file can be associated with.  If
            None, all files are expected to be for a single setup.
        cfg_lines (:obj:`list`, optional):
            A list of strings that provide a set of user-defined
            parameters for executing pypeit.  These are the lines of a
            configuration file.  See the documentation for the
            `configobj`_ package.  One of the user-level inputs should
            be the spectrograph that provided the data to be reduced.
            One can get the list of spectrographs currently served by
            running::
                
                from pypeit.spectrographs.util import valid_spectrographs
                print(valid_spectrographs())

            To use all the default parameters when reducing data from a
            given spectrograph, you can set `cfg_lines = None`, but you
            then *must* provide the `spectrograph_name`.
        spectrograph_name (:obj:`str`, optional):
            If not providing a list of configuration lines
            (`cfg_lines`), this sets the spectrograph.  The spectrograph
            defined in `cfg_lines` takes precedence over anything
            provided by this argument.
        pypeit_file (:obj:`str`, optional):
            The name of the pypeit file used to instantiate the
            reduction. This can be None, and will lead to default
            names for output files (TODO: Give list). Setting
            :ref:`pypeit_file` here *only sets the name of the file*.
            To instantiate a :class:`~pypeit.pypeitsetup.PypeItSetup`
            object directly from a pypeit file (i.e. by reading the
            file), use the :func:`from_pypeit_file` method; i.e.::
                
                setup = PypeItSetup.from_pypeit_file('myfile.pypeit')

    Attributes:
        file_list (list):
            See description of class argument.
        frametype (dict):
            See description of class argument.
        setups (list):
            See description of class argument.
        pypeit_file (str):
            See description of class argument.
        spectrograph (:class:`pypeit.spectrographs.spectrograph.Spectrograph`):
            An instance of the `Spectrograph` class used throughout the
            reduction procedures.
        par (:class:`pypeit.par.pypeitpar.PypeItPar`):
            An instance of the `PypeItPar` class that provides the
            parameters to all the algorithms that pypeit uses to reduce
            the data.
        fitstbl (:class:`pypeit.metadata.PypeItMetaData`):
            A `Table` that provides the salient metadata for the fits
            files to be reduced.
        setup_dict (dict):
            The dictionary with the list of instrument setups.
        steps (list):
            The steps run to provide the pypeit setup.
    """
    def __init__(self, file_list, path=None, frametype=None, usrdata=None, setups=None,
                 cfg_lines=None, spectrograph_name=None, pypeit_file=None):

        # The provided list of files cannot be None
        if file_list is None or len(file_list) == 0:
            msgs.error('Must provide a list of files to be reduced!')

        # Save input
        self.file_list = file_list
        self.path = os.getcwd() if path is None else path
        self.frametype = frametype
        self.usrdata = usrdata
        self.setups = setups
        self.pypeit_file = pypeit_file
        self.user_cfg = cfg_lines

        # Determine the spectrograph name
        _spectrograph_name = spectrograph_name if cfg_lines is None \
                    else PypeItPar.from_cfg_lines(merge_with=cfg_lines)['rdx']['spectrograph']

        # Cannot proceed without spectrograph name
        if _spectrograph_name is None:
            msgs.error('Must provide spectrograph name directly or using configuration lines.')
       
        # Instantiate the spectrograph
        self.spectrograph = load_spectrograph(_spectrograph_name)#, ifile=file_list[0])

        # Get the spectrograph specific configuration to be merged with
        # the user modifications.
        spectrograph_cfg_lines = self.spectrograph.default_pypeit_par().to_config()

        # Instantiate the pypeit parameters.  The user input
        # configuration (cfg_lines) can be None.
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

        # Prepare internals for execution
        self.fitstbl = None
        self.setup_dict = None
        self.steps = []

    @classmethod
    def from_pypeit_file(cls, filename):
        """
        Instantiate the :class:`PypeItSetup` object using a pypeit file.

        Args:
            filename (str):
                Name of the pypeit file to read.  PypeIt files have a
                specific set of valid formats. A description can be
                found at :ref:`pypeit_file`.
        
        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(filename)
        return cls(data_files, frametype=frametype, usrdata=usrdata, setups=setups,
                   cfg_lines=cfg_lines, pypeit_file=filename)

    @classmethod
    def from_file_root(cls, root, spectrograph, extension='.fits', output_path=None):
        """
        Instantiate the :class:`PypeItSetup` object by providing a file
        root.
        
        This is based on first writing a vanilla PypeIt file for the
        provided spectrograph and extension to a file in the provided
        path.

        Args:
            root (:obj:`str`):
                The root path to all the files for PypeIt to reduce.
                This should be everything up to the wild-card before the
                file extension to use to find the relevant files.  The
                root itself can have wild cards to read through multiple
                directories.
            spectrograph (:obj:`str`):
                The PypeIt name of the spectrograph used to take the
                observations.  This should be one of the available
                options in
                :func:`pypeit.spectrographs.valid_spectrographs`.
            extension (:obj:`str`, optional):
                The extension common to all the fits files to reduce.
                Default is '.fits', meaning anything with `root*.fits*`
                will be included.
            output_path (:obj:`str`, optional):
                Path to use for the output.  If None, the default is
                './setup_files'.  If the path doesn't yet exist, it is
                created.
        
        Returns:
            :class:`PypeItSetup`: The instance of the class.
        """
        # Set the output directory
        outdir = os.path.join(os.getcwd(), 'setup_files') if output_path is None else output_path
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
        # Set the output file name
        date = str(datetime.date.today().strftime('%Y-%b-%d'))
        pypeit_file = os.path.join(outdir, '{0}_{1}.pypeit'.format(spectrograph, date))
        msgs.info('A vanilla pypeit file will be written to: {0}'.format(pypeit_file))
        
        # Generate the pypeit file
        cls.vanilla_pypeit_file(pypeit_file, root, spectrograph, extension=extension)

        # Now setup PypeIt using that file
        return cls.from_pypeit_file(pypeit_file)

    @staticmethod
    def vanilla_pypeit_file(pypeit_file, root, spectrograph, extension='.fits'):
        """
        Write a vanilla PypeIt file.

        Args:
            pypeit_file (str):
              Name of the PypeIt file to be generated
            root (str):
              Root of the path to the raw data files; may include wildcards
            spectrograph (str):
              Name of the spectrograph
            extension (str, optional):
              File extension of the raw data files

        Returns:

        """
        # Generate
        dfname = os.path.join(root, '*{0}*'.format(extension)) \
                    if os.path.isdir(root) else '{0}*{1}*'.format(root, extension)
        # configuration lines
        cfg_lines = ['[rdx]']
        cfg_lines += ['    spectrograph = {0}'.format(spectrograph)]
#        cfg_lines += ['    sortroot = {0}'.format(root)]
        make_pypeit_file(pypeit_file, spectrograph, [dfname], cfg_lines=cfg_lines, setup_mode=True)

    @property
    def nfiles(self):
        """The number of files to reduce."""
        if self.fitstbl is None:
            msgs.warn('No fits files have been read!')
        return 0 if self.fitstbl is None else len(self.fitstbl)

    def __repr__(self):
        return '<{:s}: nfiles={:d}>'.format(self.__class__.__name__, self.nfiles)

    def build_fitstbl(self, strict=True):
        """
        Construct the table with metadata for the frames to reduce.

        Largely a wrapper for :func:`pypeit.core.load.create_fitstbl`.

        Args:
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read the headers of any of the files in
                :attr:`file_list`.  Set to False to only report a
                warning and continue.

        Returns:
            :obj:`astropy.table.Table`: Table with the metadata for each
            fits file to reduce.  Note this is different from
            :attr:`fitstbl` which is a :obj:`PypeItMetaData` object
        """
        # Build and sort the table
        self.fitstbl = PypeItMetaData(self.spectrograph, par=self.par, files=self.file_list,
                                      usrdata=self.usrdata, strict=strict)
        # Sort by the time
        if 'time' in self.fitstbl.keys():
            self.fitstbl.sort('time')

        # Add this to the completed steps
        self.steps.append(inspect.stack()[0][3])

        # Return the table
        return self.fitstbl.table

    def get_frame_types(self, flag_unknown=False, use_header_id=False):
        """
        Include the frame types in the metadata table.

        This is mainly a wrapper for
        :func:`PypeItMetaData.get_frame_types`.

        .. warning::

            Because this merges the frame types with the existing
            :attr:`fitstbl` this should only be run once.

        Args:
            flag_unknown (:obj:`bool`, optional):
                Allow for frames to have unknown types instead of
                crashing.  This should be True for initial setup and
                False otherwise.  Passed to get_frame_types()
            use_header_id (bool, optional):
                Passed to get_frame_types()

        """
        # Use PypeItMetaData methods to get the frame types
        _ = self.fitstbl.get_frame_types(flag_unknown=flag_unknown, user=self.frametype,
                                         useIDname=use_header_id)
        # Include finished processing step
        self.steps.append(inspect.stack()[0][3])

    def load_metadata(self, fits_file):
        """
        Load the fitstbl from disk (a binary FITS table)

        Args:
            fits_file (str):  Name of PypeItMetaData file

        Returns:
            :obj:`PypeItMetaData`: The so-called fitstbl

        """
        self.fitstbl = PypeItMetaData(self.spectrograph, self.par, data=Table.read(fits_file))
        msgs.info("Loaded fitstbl from {:s}".format(fits_file))
        return self.fitstbl.table

    def write_metadata(self, sort_dir=None, ofile=None):
        """
        Write the `astropy.table.Table`_ object in :attr:`fitstbl`
        to a file.

        If an output file is provided, the file is used.  If that file
        name includes '.fits', the output will be a fits file; otherwise
        the format is ascii.fixed_width.

        If no output file, the default is an ascii table with an '.lst'
        extension.  The root name of the file is either the spectrograph
        name or the root name of the pypeit file, if the latter exists.

        If a `sort_dir` is provided, the directory of the nominal output
        file is changed to this directory.

        Args:
            sort_dir (:obj:`str`, optional):
                The full root of the name for the metadata table
                ('.lst') file.
            ofile (:obj:`str`, optional):
                The name of the file to write.  See description above.
        """
        if ofile is None:
            ofile = self.spectrograph.name + '.lst' if self.pypeit_file is None \
                        else self.pypeit_file.replace('.pypeit', '.lst')
            if sort_dir is not None:
                ofile = os.path.join(sort_dir, os.path.split(ofile)[1])

        format = None if '.fits' in ofile else 'ascii.fixed_width'
        self.fitstbl.write(ofile,
                           #columns=None if format is None else self.spectrograph.pypeit_file_keys(),
                           format=format, overwrite=True)

    # TODO: Check if user_header_id is ever actually used.
    def run(self, setup_only=False, calibration_check=False, use_header_id=False, sort_dir=None,
            write_bkg_pairs=False, clean_config=True):
        """
        Once instantiated, this is the main method used to construct the
        object.
        
        The code flow is as follows::
            - Build the fitstbl from an input file_list (optional)
            - Type the files (bias, arc, etc.)
            - Match calibration files to the science files
            - Generate the setup_dict
                - Write group info to disk
                - Write calib info to disk (if main run)

        It is expected that a user will run this three times if they're
        being careful.  Once with `setup_only=True` to confirm the
        images are properly typed and grouped together for calibration.
        A second time with `calibration_check=True` to confirm the
        appropriate calibration frames are available.  And a third time
        to do the actual setup before proceeding with the reductions.

        Args:
            setup_only (:obj:`bool`, optional):
                Only this setup will be performed. ``PypeIt`` is
                expected to execute in a way that ends after this
                class is fully instantiated such that the user can
                inspect the results before proceeding. This has the
                effect of providing more output describing the
                success of the setup and how to proceed, and provides
                warnings (instead of errors) for issues that may
                cause the reduction itself to fail.
            calibration_check (:obj:`bool`, optional):
                Only check that the calibration frames are
                appropriately setup and exist on disk. ``PypeIt`` is
                expected to execute in a way that ends after this
                class is fully instantiated such that the user can
                inspect the results before proceeding.
            use_header_id (:obj:`bool`, optional):
                Allow setup to use the frame types drawn from single,
                instrument-specific header keywords set to ``idname``
                in the metadata table (:attr:`fitstbl`).
            sort_dir (:obj:`str`, optional):
                The directory to put the '.sorted' file.
            write_bkg_pairs (:obj:`bool`, optional):
                Include columns with the (unassigned) background
                image pairs. This is a convenience so that users can
                more easily add/edit the background pair ID numbers.
            clean_config (:obj:`bool`, optional):
                Remove files with metadata that indicate an
                instrument configuration that ``PypeIt`` cannot
                reduce. See
                :func:`~pypeit.spectrographs.spectrograph.Spectrograph.valid_configuration_values`.

        Returns:
            :obj:`tuple`: Returns, respectively, the
            :class:`~pypeit.par.pypeitpar.PypeItPar` object with the
            reduction parameters, the
            :class:`~pypeit.spectrographs.spectrograph.Spectrograph`
            object with the spectrograph instance, and an
            `astropy.table.Table`_ with the frame metadata. If
            ``setup_only`` is True, these are all returned as None
            values.
        """
        # Kludge
        pypeit_file = '' if self.pypeit_file is None else self.pypeit_file

        # Build fitstbl
        if self.fitstbl is None:
            self.build_fitstbl(strict=not setup_only)#, bkg_pairs=bkg_pairs)

        # Remove frames that have invalid values for
        # configuration-defining metadata
        if clean_config:
            self.fitstbl.clean_configurations()

        # File typing
        self.get_frame_types(flag_unknown=setup_only or calibration_check,
                             use_header_id=use_header_id)

        # Determine the configurations and assign each frame to the
        # specified configuration
        cfgs = self.fitstbl.unique_configurations()
        self.fitstbl.set_configurations(cfgs)

        # Assign frames to calibration groups
        self.fitstbl.set_calibration_groups() #global_frames=['bias', 'dark'])

        # Set default comb_id
        self.fitstbl.set_combination_groups()

        # TODO: Are we planning to do this?
        # Assign science IDs based on the calibrations groups (to be
        # deprecated)
        self.fitstbl['failures'] = False                    # TODO: placeholder

        if setup_only:
            # Collate all matching files and write .sorted Table (on pypeit_setup only)
            sorted_file = self.spectrograph.name + '.sorted' \
                                if pypeit_file is None or len(pypeit_file) == 0 \
                                else pypeit_file.replace('.pypeit', '.sorted')
            if sort_dir is not None:
                sorted_file = os.path.join(sort_dir, os.path.split(sorted_file)[1])
            self.fitstbl.write_sorted(sorted_file, write_bkg_pairs=write_bkg_pairs)
            msgs.info("Wrote sorted file data to {:s}".format(sorted_file))

        else:
            # Write the calib file
            # This is currently needed for QA
            calib_file = self.spectrograph.name + '.calib' \
                                if pypeit_file is None or len(pypeit_file) == 0 \
                                else pypeit_file.replace('.pypeit', '.calib')
            if sort_dir is not None:
                calib_file = os.path.join(sort_dir, os.path.split(calib_file)[1])
            self.fitstbl.write_calib(calib_file)

        # Finish (depends on PypeIt run mode)
        # TODO: Do we need this functionality?
        if calibration_check:
            msgs.info("Inspect the .calib file: {:s}".format(calib_file))
            msgs.info("*********************************************************")
            msgs.info("Calibration check complete and successful!")
#            msgs.info("Set 'run calcheck False' to continue with data reduction")
            msgs.info("*********************************************************")
            #TODO: Why should this not return the same as when setup_only is True

        if setup_only:
            for idx in np.where(self.fitstbl['failures'])[0]:
                msgs.warn("No Arc found: Skipping object {:s} with file {:s}".format(
                            self.fitstbl['target'][idx],self.fitstbl['filename'][idx]))
            msgs.info("Setup is complete.")
            msgs.info("Inspect the .sorted file")
            return None, None, None

        return self.par, self.spectrograph, self.fitstbl
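# A minimal sketch of the three-pass workflow described in run() above
# ('myfile.pypeit' is a hypothetical file name; each pass builds a fresh
# PypeItSetup object because get_frame_types() should only be run once):
#
#     PypeItSetup.from_pypeit_file('myfile.pypeit').run(setup_only=True)         # pass 1: check typing/grouping (.sorted)
#     PypeItSetup.from_pypeit_file('myfile.pypeit').run(calibration_check=True)  # pass 2: check calibrations (.calib)
#     par, spectrograph, fitstbl = PypeItSetup.from_pypeit_file('myfile.pypeit').run()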
Ejemplo n.º 19
0
class PypeIt:
    """
    This class runs the primary calibration and extraction in PypeIt

    .. todo::
        Fill in list of attributes!

    Args:
        pypeit_file (:obj:`str`):
            PypeIt filename.
        verbosity (:obj:`int`, optional):
            Verbosity level of system output.  Can be:

                - 0: No output
                - 1: Minimal output (default)
                - 2: All output

        overwrite (:obj:`bool`, optional):
            Flag to overwrite any existing files/directories.
        reuse_masters (:obj:`bool`, optional):
            Reuse any pre-existing calibration files
        logname (:obj:`str`, optional):
            The name of an ascii log file with the details of the
            reduction.
        show (:obj:`bool`, optional):
            Show reduction steps via plots (which will block further
            execution until clicked on) and outputs to ginga. Requires
            remote control ginga session via ``ginga --modules=RC,SlitWavelength &``
        redux_path (:obj:`str`, optional):
            Override the reduction path given in the PypeIt file (e.g., for notebook usage)
        calib_only (:obj:`bool`, optional):
            Only generate the calibration files (no science reduction is performed)

    Attributes:
        pypeit_file (:obj:`str`):
            Name of the pypeit file to read.  PypeIt files have a
            specific set of valid formats. A description can be found
            :ref:`pypeit_file`.
        fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info

    """
    def __init__(self,
                 pypeit_file,
                 verbosity=2,
                 overwrite=True,
                 reuse_masters=False,
                 logname=None,
                 show=False,
                 redux_path=None,
                 calib_only=False):

        # Set up logging
        self.logname = logname
        self.verbosity = verbosity
        self.pypeit_file = pypeit_file

        self.msgs_reset()

        # Load
        cfg_lines, data_files, frametype, usrdata, setups, _ \
                = parse_pypeit_file(pypeit_file, runtime=True)
        self.calib_only = calib_only

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)
        msgs.info('Loaded spectrograph {0}'.format(self.spectrograph.name))

        # --------------------------------------------------------------
        # Get the full set of PypeIt parameters
        #   - Grab a science or standard file for configuration specific parameters

        config_specific_file = None
        for idx, row in enumerate(usrdata):
            if ('science' in row['frametype']) or ('standard'
                                                   in row['frametype']):
                config_specific_file = data_files[idx]
        # if no science/standard frame was found, fall back to an arc or trace frame
        if config_specific_file is None:
            for idx, row in enumerate(usrdata):
                if ('arc' in row['frametype']) or ('trace'
                                                   in row['frametype']):
                    config_specific_file = data_files[idx]
        if config_specific_file is not None:
            msgs.info(
                'Setting configuration-specific parameters using {0}'.format(
                    os.path.split(config_specific_file)[1]))
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(
            config_specific_file).to_config()

        #   - Build the full set, merging with any user-provided
        #     parameters
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)
        msgs.info('Built full PypeIt parameter set.')

        # Check the output paths are ready
        if redux_path is not None:
            self.par['rdx']['redux_path'] = redux_path

        # TODO: Write the full parameter set here?
        # --------------------------------------------------------------

        # --------------------------------------------------------------
        # Build the meta data
        #   - Re-initialize based on the file data
        msgs.info('Compiling metadata')
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      files=data_files,
                                      usrdata=usrdata,
                                      strict=True)
        #   - Interpret automated or user-provided data from the PypeIt
        #   file
        self.fitstbl.finalize_usr_build(frametype, setups[0])

        # --------------------------------------------------------------
        #   - Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.overwrite = overwrite

        # Currently the runtime argument determines the behavior for
        # reuse_masters.
        self.reuse_masters = reuse_masters
        self.show = show

        # Set paths
        self.calibrations_path = os.path.join(
            self.par['rdx']['redux_path'],
            self.par['calibrations']['master_dir'])

        # Check for calibrations
        if not self.calib_only:
            calibrations.check_for_calibs(
                self.par,
                self.fitstbl,
                raise_error=self.par['calibrations']['raise_chk_error'])

        # Report paths
        msgs.info('Setting reduction path to {0}'.format(
            self.par['rdx']['redux_path']))
        msgs.info('Master calibration data output to: {0}'.format(
            self.calibrations_path))
        msgs.info('Science data output to: {0}'.format(self.science_path))
        msgs.info('Quality assessment plots output to: {0}'.format(
            self.qa_path))
        # TODO: Is anything written to the qa dir or only to qa/PNGs?
        # Should we have separate calibration and science QA
        # directories?
        # An html file wrapping them all too

        # Init
        # TODO: I don't think this is ever used

        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None

    @property
    def science_path(self):
        """Return the path to the science directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['scidir'])

    @property
    def qa_path(self):
        """Return the path to the top-level QA directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['qadir'])

    def build_qa(self):
        """
        Generate QA wrappers
        """
        msgs.qa_path = self.qa_path
        qa.gen_qa_dir(self.qa_path)
        qa.gen_mf_html(self.pypeit_file, self.qa_path)
        qa.gen_exp_html()

    # TODO: This should go in a more relevant place
    def spec_output_file(self, frame, twod=False):
        """
        Return the path to the spectral output data file.
        
        Args:
            frame (:obj:`int`):
                Frame index from :attr:`fitstbl`.
            twod (:obj:`bool`):
                If True, return the path to the 2D output file; otherwise the 1D file.
        
        Returns:
            :obj:`str`: The path for the output file
        """
        return os.path.join(
            self.science_path,
            'spec{0}d_{1}.fits'.format('2' if twod else '1',
                                       self.fitstbl.construct_basename(frame)))
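
    # The constructed path has the form (the basename is taken from the
    # metadata table):
    #   <science_path>/spec1d_<basename>.fits  or  <science_path>/spec2d_<basename>.fits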

    def outfile_exists(self, frame):
        """
        Check whether the 2D outfile of a given frame already exists

        Args:
            frame (int): Frame index from fitstbl

        Returns:
            bool: True if the 2d file exists, False if it does not exist
        """
        return os.path.isfile(self.spec_output_file(frame, twod=True))

    def get_std_outfile(self, standard_frames):
        """
        Grab the output filename from an input list of standard_frame indices

        If more than one index is provided, the first is taken

        Args:
            standard_frames (list): List of indices corresponding to standard stars

        Returns:
            str: Full path to the standard spec1d output file
        """
        # TODO: Need to decide how to associate standards with
        # science frames in the case where there is more than one
        # standard associated with a given science frame.  Below, I
        # just use the first standard

        std_outfile = None
        std_frame = None if len(standard_frames) == 0 else standard_frames[0]
        # Prepare to load up standard?
        if std_frame is not None:
            std_outfile = self.spec_output_file(std_frame) \
                            if isinstance(std_frame, (int,np.integer)) else None
        if std_outfile is not None and not os.path.isfile(std_outfile):
            msgs.error('Could not find standard file: {0}'.format(std_outfile))
        return std_outfile

    def calib_all(self, run=True):
        """
        Create calibrations for all setups

        This will not crash if not all of the standard set of files are provided.

        Args:
            run (bool, optional): If False, only print the calib names and do
                not actually run.  Only used with the pypeit_parse_calib_id script

        Returns:
            dict: A simple dict summarizing the calibration names
        """
        calib_dict = {}

        self.tstart = time.time()

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))
        for i in range(self.fitstbl.n_calib_groups):
            # 1-indexed calib number
            calib_grp = str(i + 1)
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)
            grp_frames = frame_indx[in_grp]

            # Find the detectors to reduce
            #            detectors = PypeIt.select_detectors(detnum=self.par['rdx']['detnum'],
            #                                                ndet=self.spectrograph.ndet)
            detectors = self.spectrograph.select_detectors(
                subset=self.par['rdx']['detnum'])
            calib_dict[calib_grp] = {}
            # Loop on Detectors
            for self.det in detectors:
                # Instantiate Calibrations class
                self.caliBrate = calibrations.Calibrations.get_instance(
                    self.fitstbl,
                    self.par['calibrations'],
                    self.spectrograph,
                    self.calibrations_path,
                    qadir=self.qa_path,
                    reuse_masters=self.reuse_masters,
                    show=self.show,
                    user_slits=slittrace.merge_user_slit(
                        self.par['rdx']['slitspatnum'],
                        self.par['rdx']['maskIDs']))
                # Do it
                # TODO: Why isn't set_config part of the Calibrations.__init__ method?
                self.caliBrate.set_config(grp_frames[0], self.det,
                                          self.par['calibrations'])

                # Allow skipping the run (e.g. parse_calib_id.py script)
                if run:
                    self.caliBrate.run_the_steps()

                key = self.caliBrate.master_key_dict['frame']
                calib_dict[calib_grp][key] = {}
                for step in self.caliBrate.steps:
                    if step in ['bpm', 'slits', 'wv_calib', 'tilts', 'flats']:
                        continue
                    elif step == 'tiltimg':  # Annoying kludge
                        step = 'tilt'
                    # Prep
                    raw_files, self.caliBrate.master_key_dict[
                        step] = self.caliBrate._prep_calibrations(step)
                    masterframe_name = masterframe.construct_file_name(
                        buildimage.frame_image_classes[step],
                        self.caliBrate.master_key_dict[step],
                        master_dir=self.caliBrate.master_dir)

                    # Add to dict
                    if len(raw_files) > 0:
                        calib_dict[calib_grp][key][step] = {}
                        calib_dict[calib_grp][key][step][
                            'master_key'] = self.caliBrate.master_key_dict[
                                step]
                        calib_dict[calib_grp][key][step][
                            'master_name'] = os.path.basename(masterframe_name)
                        calib_dict[calib_grp][key][step]['raw_files'] = [
                            os.path.basename(ifile) for ifile in raw_files
                        ]

        # Print the results
        print(json.dumps(calib_dict, sort_keys=True, indent=4))

        # Write
        msgs.info('Writing calib file')
        calib_file = self.pypeit_file.replace('.pypeit', '.calib_ids')
        ltu.savejson(calib_file, calib_dict, overwrite=True, easy_to_read=True)

        # Finish
        self.print_end_time()

        # Return
        return calib_dict
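
    # A sketch of the nested dictionary returned by calib_all() (keys and file
    # names are illustrative; the inner key comes from master_key_dict['frame']):
    #
    #   {'1': {'A_1_DET01': {'arc':  {'master_key': 'A_1_DET01',
    #                                 'master_name': 'MasterArc_A_1_DET01.fits',
    #                                 'raw_files': ['b1.fits']},
    #                        'tilt': {...}}},
    #    '2': {...}}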

    def reduce_all(self):
        """
        Main driver of the entire reduction

        Calibration and extraction via a series of calls to reduce_exposure()

        """
        # Validate the parameter set
        self.par.validate_keys(required=[
            'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure'
        ])
        self.tstart = time.time()

        # Find the standard frames
        is_standard = self.fitstbl.find_frames('standard')

        # Find the science frames
        is_science = self.fitstbl.find_frames('science')

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))

        # Standard Star(s) Loop
        # Iterate over each calibration group and reduce the standards
        for i in range(self.fitstbl.n_calib_groups):

            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the standard frames in this calibration group:
            grp_standards = frame_indx[is_standard & in_grp]

            # Reduce all the standard frames, loop on unique comb_id
            u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards])
            for j, comb_id in enumerate(u_combid_std):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    # Build history to document what contributed to the reduced
                    # exposure
                    history = History(self.fitstbl.frame_paths(frames[0]))
                    history.add_reduce(i, self.fitstbl, frames, bg_frames)
                    std_spec2d, std_sobjs = self.reduce_exposure(
                        frames, bg_frames=bg_frames)

                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], std_spec2d, std_sobjs,
                                       self.basename, history)
                else:
                    msgs.info(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

        # Science Frame(s) Loop
        # Iterate over each calibration group again and reduce the science frames
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the science frames in this calibration group:
            grp_science = frame_indx[is_science & in_grp]
            # Associate standards (previously reduced above) for this setup
            std_outfile = self.get_std_outfile(frame_indx[is_standard])
            # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration
            science_basename = [None] * len(grp_science)
            # Loop on unique comb_id
            u_combid = np.unique(self.fitstbl['comb_id'][grp_science])

            for j, comb_id in enumerate(u_combid):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                # Find all frames whose comb_id matches the current frames bkg_id.
                bg_frames = np.where((self.fitstbl['comb_id'] ==
                                      self.fitstbl['bkg_id'][frames][0])
                                     & (self.fitstbl['comb_id'] >= 0))[0]
                # JFH changed the syntax below to that above, which allows frames to be used more than once
                # as a background image. The syntax below would require that we could somehow list multiple
                # numbers for the bkg_id which is impossible without a comma separated list
                #                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:

                    # Build history to document what contributed to the reduced
                    # exposure
                    history = History(self.fitstbl.frame_paths(frames[0]))
                    history.add_reduce(i, self.fitstbl, frames, bg_frames)

                    # TODO -- Should we reset/regenerate self.slits.mask for a new exposure
                    sci_spec2d, sci_sobjs = self.reduce_exposure(
                        frames, bg_frames=bg_frames, std_outfile=std_outfile)
                    science_basename[j] = self.basename

                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], sci_spec2d, sci_sobjs,
                                       self.basename, history)
                else:
                    msgs.warn(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

            msgs.info('Finished calibration group {0}'.format(i))

        # Finish
        self.print_end_time()

    def reduce_exposure(self, frames, bg_frames=None, std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the frames to
                reduce.
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background.
            std_outfile (:obj:`str`, optional):
                File with a previously reduced standard spectrum from
                PypeIt.

        Returns:
            tuple: An :class:`~pypeit.spec2dobj.AllSpec2DObj` with the 2D
            outputs of extraction and a :class:`~pypeit.specobjs.SpecObjs`
            object with the extracted spectra.

        """

        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            # TODO: Put this in a try/except block?
            display.clear_all(allow_new=True)

        has_bg = bg_frames is not None and len(bg_frames) > 0
        # Is this a b/g subtraction reduction?
        if has_bg:
            self.bkg_redux = True
            # The default is to find_negative objects if the bg_frames are classified as "science", and to not find_negative
            # objects if the bg_frames are classified as "sky". This can be explicitly overridden if
            # par['reduce']['findobj']['find_negative'] is set to something other than the default of None.
            self.find_negative = (('science' in self.fitstbl['frametype'][bg_frames[0]]) |
                                  ('standard' in self.fitstbl['frametype'][bg_frames[0]]))\
                if self.par['reduce']['findobj']['find_negative'] is None else self.par['reduce']['findobj']['find_negative']
        else:
            self.bkg_redux = False
            self.find_negative = False

        # Container for all the Spec2DObj
        all_spec2d = spec2dobj.AllSpec2DObj()
        all_spec2d['meta']['bkg_redux'] = self.bkg_redux
        all_spec2d['meta']['find_negative'] = self.find_negative
        # TODO -- Should we reset/regenerate self.slits.mask for a new exposure

        # container for specobjs during first loop (objfind)
        all_specobjs_objfind = specobjs.SpecObjs()
        # container for specobjs during second loop (extraction)
        all_specobjs_extract = specobjs.SpecObjs()
        # list of global_sky obtained during objfind and used in extraction
        initial_sky_list = []
        # list of sciImg
        sciImg_list = []
        # List of detectors with successful calibration
        calibrated_det = []
        # list of successful MasterSlits calibrations to be used in the extraction loop
        calib_slits = []
        # List of objFind objects
        objFind_list = []

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(
            self.fitstbl['target'][frames[0]]) + msgs.newline()
        # TODO: Print these when the frames are actually combined,
        # backgrounds are used, etc?
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in frames:
            msgs_string += '{0:s}'.format(
                self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if has_bg:
            bg_msgs_string = ''
            for iframe in bg_frames:
                bg_msgs_string += '{0:s}'.format(
                    self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline(
            ) + 'Using background from frames:' + msgs.newline(
            ) + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        subset = self.par['rdx']['slitspatnum'] if self.par['rdx']['slitspatnum'] is not None \
                    else self.par['rdx']['detnum']
        detectors = self.spectrograph.select_detectors(subset=subset)
        msgs.info(f'Detectors to work on: {detectors}')

        # Loop on Detectors
        # TODO: Attempt to put in a multiprocessing call here?
        # objfind
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            # run calibration
            self.caliBrate = self.calib_one(frames, self.det)
            if not self.caliBrate.success:
                msgs.warn(
                    f'Calibrations for detector {self.det} were unsuccessful!  The step '
                    f'that failed was {self.caliBrate.failed_step}.  Continuing by '
                    f'skipping this detector.')
                continue

            # we save only the detectors that had a successful calibration,
            # and we use only those in the extract loop below
            calibrated_det.append(self.det)
            # we also save the successful MasterSlits calibrations because they are used and modified
            # in the slitmask stuff in between the two loops
            calib_slits.append(self.caliBrate.slits)
            # global_sky, skymask and sciImg are needed in the extract loop
            initial_sky, sobjs_obj, sciImg, objFind = self.objfind_one(
                frames, self.det, bg_frames, std_outfile=std_outfile)
            if len(sobjs_obj) > 0:
                all_specobjs_objfind.add_sobj(sobjs_obj)
            initial_sky_list.append(initial_sky)
            sciImg_list.append(sciImg)
            objFind_list.append(objFind)

        # slitmask stuff
        if self.par['reduce']['slitmask']['assign_obj']:
            # get object positions from slitmask design and slitmask offsets for all the detectors
            spat_flexure = np.array([ss.spat_flexure for ss in sciImg_list])
            # Grab platescale with binning
            bin_spec, bin_spat = parse.parse_binning(self.binning)
            platescale = np.array(
                [ss.detector.platescale * bin_spat for ss in sciImg_list])
            # get the dither offset if available
            if self.par['reduce']['slitmask']['use_dither_offset']:
                dither = self.spectrograph.parse_dither_pattern(
                    [self.fitstbl.frame_paths(frames[0])])
                dither_off = dither[2][0] if dither is not None else None
            else:
                dither_off = None
            calib_slits = slittrace.get_maskdef_objpos_offset_alldets(
                all_specobjs_objfind,
                calib_slits,
                spat_flexure,
                platescale,
                self.par['calibrations']['slitedges']['det_buffer'],
                self.par['reduce']['slitmask'],
                dither_off=dither_off)
            # determine if slitmask offsets exist and compute an average offsets over all the detectors
            calib_slits = slittrace.average_maskdef_offset(
                calib_slits, platescale[0],
                self.spectrograph.list_detectors(
                    mosaic='MSC' in calib_slits[0].detname))
            # slitmask design matching and add undetected objects
            all_specobjs_objfind = slittrace.assign_addobjs_alldets(
                all_specobjs_objfind, calib_slits, spat_flexure, platescale,
                self.par['reduce']['slitmask'],
                self.par['reduce']['findobj']['find_fwhm'])

        # Extract
        for i, self.det in enumerate(calibrated_det):
            # re-run (i.e., load) calibrations
            self.caliBrate = self.calib_one(frames, self.det)
            self.caliBrate.slits = calib_slits[i]

            detname = sciImg_list[i].detector.name

            # TODO: pass back the background frame, pass in background
            # files as an argument. extract one takes a file list as an
            # argument and instantiates science within
            if all_specobjs_objfind.nobj > 0:
                all_specobjs_on_det = all_specobjs_objfind[
                    all_specobjs_objfind.DET == detname]
            else:
                all_specobjs_on_det = all_specobjs_objfind

            # Extract
            all_spec2d[detname], tmp_sobjs \
                    = self.extract_one(frames, self.det, sciImg_list[i], objFind_list[i],
                                       initial_sky_list[i], all_specobjs_on_det)
            # Hold em
            if tmp_sobjs.nobj > 0:
                all_specobjs_extract.add_sobj(tmp_sobjs)
            # JFH TODO write out the background frame?

            # TODO -- Save here?  Seems like we should.  Would probably need to use update_det=True

        # Return
        return all_spec2d, all_specobjs_extract

    def get_sci_metadata(self, frame, det):
        """
        Grab the meta data for a given science frame and specific detector

        Args:
            frame (int): Frame index
            det (int): Detector index

        Returns:
            5 objects are returned::
                - str: Object type;  science or standard
                - str: Setup string from master_key()
                - astropy.time.Time: Time of observation
                - str: Basename of the frame
                - str: Binning of the detector

        """

        # Set binning, obstime, basename, and objtype
        binning = self.fitstbl['binning'][frame]
        obstime = self.fitstbl.construct_obstime(frame)
        basename = self.fitstbl.construct_basename(frame, obstime=obstime)
        objtype = self.fitstbl['frametype'][frame]
        if 'science' in objtype:
            objtype_out = 'science'
        elif 'standard' in objtype:
            objtype_out = 'standard'
        else:
            msgs.error('Unrecognized objtype')
        setup = self.fitstbl.master_key(frame, det=det)
        return objtype_out, setup, obstime, basename, binning

    def calib_one(self, frames, det):
        """
        Run Calibration for a single exposure/detector pair

        Args:
            frames (:obj:`list`):
                List of frames to extract; stacked if more than one
                is provided
            det (:obj:`int`):
                Detector number (1-indexed)

        Returns:
            caliBrate (:class:`pypeit.calibrations.Calibrations`)

        """

        msgs.info("Working on detector {0}".format(det))
        # Instantiate Calibrations class
        caliBrate = calibrations.Calibrations.get_instance(
            self.fitstbl,
            self.par['calibrations'],
            self.spectrograph,
            self.calibrations_path,
            qadir=self.qa_path,
            reuse_masters=self.reuse_masters,
            show=self.show,
            user_slits=slittrace.merge_user_slit(
                self.par['rdx']['slitspatnum'], self.par['rdx']['maskIDs']))
        #slitspat_num=self.par['rdx']['slitspatnum'])
        # These need to be separate to accommodate COADD2D
        caliBrate.set_config(frames[0], det, self.par['calibrations'])
        caliBrate.run_the_steps()

        return caliBrate

    def objfind_one(self, frames, det, bg_frames, std_outfile=None):
        """
        Reduce + Find Objects in a single exposure/detector pair

        sci_ID and det need to have been set internally prior to calling this method

        Parameters
        ----------
        frames : :obj:`list`
            List of frames to extract; stacked if more than one is provided
        det : :obj:`int`
            Detector number (1-indexed)
        bg_frames : :obj:`list`
            List of frames to use as the background. Can be empty.
        std_outfile : :obj:`str`, optional
            Filename for the standard star spec1d file. Passed directly to
            :func:`get_std_trace`.

        Returns
        -------
        initial_sky : `numpy.ndarray`_
            Initial global sky model
        sobjs_obj : :class:`~pypeit.specobjs.SpecObjs`
            List of objects found
        sciImg : :class:`~pypeit.images.pypeitimage.PypeItImage`
            Science image
        objFind : :class:`~pypeit.find_objects.FindObjects`
            Object finding object

        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning \
                = self.get_sci_metadata(frames[0], det)

        msgs.info("Object finding begins for {} on det={}".format(
            self.basename, det))

        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype
        frame_par = self.par['calibrations'][
            'standardframe'] if self.std_redux else self.par['scienceframe']
        # Get the standard trace if need be

        if self.std_redux is False and std_outfile is not None:
            std_trace = specobjs.get_std_trace(
                self.spectrograph.get_det_name(det), std_outfile)
        else:
            std_trace = None

        # Build Science image
        sci_files = self.fitstbl.frame_paths(frames)
        sciImg = buildimage.buildimage_fromlist(
            self.spectrograph,
            det,
            frame_par,
            sci_files,
            bias=self.caliBrate.msbias,
            bpm=self.caliBrate.msbpm,
            dark=self.caliBrate.msdark,
            flatimages=self.caliBrate.flatimages,
            slits=self.caliBrate.slits,  # For flexure correction
            ignore_saturation=False)

        # Background Image?
        if len(bg_frames) > 0:
            bg_file_list = self.fitstbl.frame_paths(bg_frames)
            sciImg = sciImg.sub(
                buildimage.buildimage_fromlist(
                    self.spectrograph,
                    det,
                    frame_par,
                    bg_file_list,
                    bpm=self.caliBrate.msbpm,
                    bias=self.caliBrate.msbias,
                    dark=self.caliBrate.msdark,
                    flatimages=self.caliBrate.flatimages,
                    slits=self.caliBrate.slits,  # For flexure correction
                    ignore_saturation=False),
                frame_par['process'])

        # Deal with manual extraction
        row = self.fitstbl[frames[0]]
        manual_obj = ManualExtractionObj.by_fitstbl_input(
            row['filename'], row['manual'],
            self.spectrograph) if len(row['manual'].strip()) > 0 else None

        # Instantiate Reduce object
        # Required for pypeline specific object
        # At instantiation, the fullmask in self.sciImg is modified
        objFind = find_objects.FindObjects.get_instance(
            sciImg,
            self.spectrograph,
            self.par,
            self.caliBrate,
            self.objtype,
            bkg_redux=self.bkg_redux,
            manual=manual_obj,
            find_negative=self.find_negative,
            std_redux=self.std_redux,
            show=self.show,
            basename=self.basename)

        # Do it
        initial_sky, sobjs_obj = objFind.run(std_trace=std_trace,
                                             show_peaks=self.show)
        # Return
        return initial_sky, sobjs_obj, sciImg, objFind

    def extract_one(self, frames, det, sciImg, objFind, initial_sky,
                    sobjs_obj):
        """
        Extract Objects in a single exposure/detector pair

        sci_ID and det need to have been set internally prior to calling this method

        Args:
            frames (:obj:`list`):
                List of frames to extract; stacked if more than one
                is provided
            det (:obj:`int`):
                Detector number (1-indexed)
            sciImg (:class:`PypeItImage`):
                Data container that holds a single image from a
                single detector its related images (e.g. ivar, mask)
            objFind : :class:`~pypeit.find_objects.FindObjects`
                Object finding object
            initial_sky (`numpy.ndarray`_):
                Initial global sky model
            sobjs_obj (:class:`pypeit.specobjs.SpecObjs`):
                List of objects found during `run_objfind`

        Returns:
            tuple: Returns a :class:`~pypeit.spec2dobj.Spec2DObj` with the 2D
            outputs from this exposure/detector pair and a
            :class:`pypeit.specobjs.SpecObjs` object with the extracted
            spectra.
        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning \
                = self.get_sci_metadata(frames[0], det)
        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype

        # Update the skymask
        skymask = objFind.create_skymask(sobjs_obj)
        # Update the global sky
        if 'standard' in self.fitstbl['frametype'][frames[0]] or \
                self.par['reduce']['findobj']['skip_final_global'] or \
                self.par['reduce']['skysub']['load_mask'] or \
                self.par['reduce']['skysub']['user_regions'] is not None:
            final_global_sky = initial_sky
        else:
            final_global_sky = objFind.global_skysub(previous_sky=initial_sky,
                                                     skymask=skymask,
                                                     show=self.show)
        scaleImg = objFind.scaleimg

        # update here slits.mask since global_skysub modify reduce_bpm and we need to propagate it into extraction
        flagged_slits = np.where(objFind.reduce_bpm)[0]
        if len(flagged_slits) > 0:
            self.caliBrate.slits.mask[flagged_slits] = \
                self.caliBrate.slits.bitmask.turn_on(self.caliBrate.slits.mask[flagged_slits], 'BADREDUCE')

        msgs.info("Extraction begins for {} on det={}".format(
            self.basename, det))

        # Instantiate Reduce object
        # Required for pypeline specific object
        # At instantiation, the fullmask in self.sciImg is modified
        # TODO Are we repeating steps in the init for FindObjects and Extract??
        self.exTract = extraction.Extract.get_instance(
            sciImg,
            sobjs_obj,
            self.spectrograph,
            self.par,
            self.caliBrate,
            self.objtype,
            bkg_redux=self.bkg_redux,
            return_negative=self.par['reduce']['extraction']
            ['return_negative'],
            std_redux=self.std_redux,
            show=self.show,
            basename=self.basename)

        if not self.par['reduce']['extraction']['skip_extraction']:
            skymodel, objmodel, ivarmodel, outmask, sobjs, waveImg, \
                tilts = self.exTract.run(final_global_sky, ra=self.fitstbl["ra"][frames[0]],
                                         dec=self.fitstbl["dec"][frames[0]], obstime=self.obstime)
        else:
            # Although extraction is not performed, still need to prepare some masks and the tilts
            self.exTract.prepare_extraction()
            # Since the extraction was not performed, fill the arrays with the best available information
            skymodel = final_global_sky
            objmodel = np.zeros_like(self.exTract.sciImg.image)
            ivarmodel = np.copy(self.exTract.sciImg.ivar)
            outmask = self.exTract.sciImg.fullmask
            waveImg = self.exTract.waveimg
            tilts = self.exTract.tilts
            sobjs = sobjs_obj

        # TODO -- Do this upstream
        # Tack on detector and wavelength RMS
        for sobj in sobjs:
            sobj.DETECTOR = sciImg.detector
            iwv = np.where(
                self.caliBrate.wv_calib.spat_ids == sobj.SLITID)[0][0]
            sobj.WAVE_RMS = self.caliBrate.wv_calib.wv_fits[iwv].rms

        # Construct table of spectral flexure
        spec_flex_table = Table()
        spec_flex_table['spat_id'] = self.caliBrate.slits.spat_id
        spec_flex_table['sci_spec_flexure'] = self.exTract.slitshift

        # pull out maskdef_designtab from caliBrate.slits
        maskdef_designtab = self.caliBrate.slits.maskdef_designtab
        slits = copy.deepcopy(self.caliBrate.slits)
        slits.maskdef_designtab = None

        # Construct the Spec2DObj
        spec2DObj = spec2dobj.Spec2DObj(
            sciimg=sciImg.image,
            ivarraw=sciImg.ivar,
            skymodel=skymodel,
            objmodel=objmodel,
            ivarmodel=ivarmodel,
            scaleimg=scaleImg,
            waveimg=waveImg,
            bpmmask=outmask,
            detector=sciImg.detector,
            sci_spat_flexure=sciImg.spat_flexure,
            sci_spec_flexure=spec_flex_table,
            vel_corr=self.exTract.vel_corr,
            vel_type=self.par['calibrations']['wavelengths']['refframe'],
            tilts=tilts,
            slits=slits,
            maskdef_designtab=maskdef_designtab)
        spec2DObj.process_steps = sciImg.process_steps

        # QA
        spec2DObj.gen_qa()

        # Return
        return spec2DObj, sobjs

    def save_exposure(self,
                      frame,
                      all_spec2d,
                      all_specobjs,
                      basename,
                      history=None):
        """
        Save the outputs from extraction for a given exposure

        Args:
            frame (:obj:`int`):
                0-indexed row in the metadata table with the frame
                that has been reduced.
            all_spec2d (:class:`pypeit.spec2dobj.AllSpec2DObj`):
                Container with the 2D outputs of extraction.
            all_specobjs (:class:`pypeit.specobjs.SpecObjs`):
                Container with the extracted 1D spectra.
            basename (:obj:`str`):
                The root name for the output file.
            history (:obj:`pypeit.history.History`):
                History entries to be added to fits header
        Returns:
            None or SpecObjs:  All of the objects saved to disk

        """
        # TODO: Need some checks here that the exposure has been reduced?

        # Determine the headers
        row_fitstbl = self.fitstbl[frame]
        # Need raw file header information
        rawfile = self.fitstbl.frame_paths(frame)
        head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext)

        # Check for the directory
        if not os.path.isdir(self.science_path):
            os.makedirs(self.science_path)

        # NOTE: There are some gymnastics here to keep from altering
        # self.par['rdx']['detnum'].  I.e., I can't just set update_det =
        # self.par['rdx']['detnum'] because that can alter the latter if I don't
        # deepcopy it...
        if self.par['rdx']['detnum'] is None:
            update_det = None
        elif isinstance(self.par['rdx']['detnum'], list):
            update_det = [
                self.spectrograph.allowed_mosaics.index(d) +
                1 if isinstance(d, tuple) else d
                for d in self.par['rdx']['detnum']
            ]
        else:
            update_det = self.par['rdx']['detnum']

        subheader = self.spectrograph.subheader_for_spec(row_fitstbl, head2d)
        # 1D spectra
        if all_specobjs.nobj > 0:
            # Spectra
            outfile1d = os.path.join(self.science_path,
                                     'spec1d_{:s}.fits'.format(basename))
            # TODO
            #embed(header='deal with the following for maskIDs;  713 of pypeit')
            all_specobjs.write_to_fits(
                subheader,
                outfile1d,
                update_det=update_det,
                slitspatnum=self.par['rdx']['slitspatnum'],
                history=history)
            # Info
            outfiletxt = os.path.join(self.science_path,
                                      'spec1d_{:s}.txt'.format(basename))
            # TODO: Note we re-read in the specobjs from disk to deal with situations where
            # only a single detector is run in a second pass but in the same reduction directory.
            # This was to address Issue #1116 in PR #1154. Slightly inefficient, but only other
            # option is to re-work write_info to also "append"
            sobjs = specobjs.SpecObjs.from_fitsfile(outfile1d,
                                                    chk_version=False)
            sobjs.write_info(outfiletxt, self.spectrograph.pypeline)
            #all_specobjs.write_info(outfiletxt, self.spectrograph.pypeline)

        # 2D spectra
        outfile2d = os.path.join(self.science_path,
                                 'spec2d_{:s}.fits'.format(basename))
        # Build header
        pri_hdr = all_spec2d.build_primary_hdr(
            head2d,
            self.spectrograph,
            redux_path=self.par['rdx']['redux_path'],
            master_key_dict=self.caliBrate.master_key_dict,
            master_dir=self.caliBrate.master_dir,
            subheader=subheader,
            history=history)

        # Write
        all_spec2d.write_to_fits(outfile2d,
                                 pri_hdr=pri_hdr,
                                 update_det=update_det,
                                 slitspatnum=self.par['rdx']['slitspatnum'])

    def msgs_reset(self):
        """
        Reset the msgs object
        """

        # Reset the global logger
        msgs.reset(log=self.logname, verbosity=self.verbosity)
        msgs.pypeit_file = self.pypeit_file

    def print_end_time(self):
        """
        Print the elapsed time
        """
        # Capture the end time and print it to user
        msgs.info(utils.get_time_string(time.time() - self.tstart))

    # TODO: Move this to fitstbl?
    def show_science(self):
        """
        Simple print of science frames
        """
        indx = self.fitstbl.find_frames('science')
        print(self.fitstbl[['target', 'ra', 'dec', 'exptime',
                            'dispname']][indx])

    def __repr__(self):
        # Generate sets string
        return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__,
                                               self.pypeit_file)
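
# A minimal usage sketch for the reduction class above; the class name and the
# .pypeit filename are illustrative (the file must exist and be a valid PypeIt
# file):
pypeit = PypeIt('keck_lris_blue_A.pypeit', verbosity=2, reuse_masters=True,
                logname='keck_lris_blue_A.log', show=False)
pypeit.reduce_all()
pypeit.build_qa()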
Example No. 20
def dummy_fitstbl(nfile=10,
                  spectro_name='shane_kast_blue',
                  directory='',
                  notype=False):
    """
    Generate a dummy fitstbl for testing

    Parameters
    ----------
    nfile : int, optional
      Number of files to mimic
    spectro_name : str, optional
      Name of spectrograph to mimic
    directory : str, optional
      Directory assigned to each mock file
    notype : bool, optional
      If True, do not add image type info to the fitstbl

    Returns
    -------
    fitstbl : PypeItMetaData

    """
    fitsdict = {}
    fitsdict['index'] = np.arange(nfile)
    fitsdict['directory'] = [directory] * nfile
    fitsdict['filename'] = ['b{:03d}.fits.gz'.format(i) for i in range(nfile)]
    # TODO: The below will fail at 60
    dates = ['2015-01-23T00:{:02d}:11.04'.format(i) for i in range(nfile)]
    ttime = time.Time(dates, format='isot')
    fitsdict['mjd'] = ttime.mjd
    fitsdict['target'] = ['Dummy'] * nfile
    fitsdict['ra'] = ['00:00:00'] * nfile
    fitsdict['dec'] = ['+00:00:00'] * nfile
    fitsdict['exptime'] = [300.] * nfile
    fitsdict['dispname'] = ['600/4310'] * nfile
    fitsdict['dichroic'] = ['560'] * nfile
    fitsdict["binning"] = ['1,1'] * nfile
    fitsdict["airmass"] = [1.0] * nfile

    if spectro_name == 'shane_kast_blue':
        fitsdict['numamplifiers'] = [1] * nfile
        # Lamps
        for i in range(1, 17):
            fitsdict['lampstat{:02d}'.format(i)] = ['off'] * nfile
        fitsdict['exptime'][0] = 0  # Bias
        fitsdict['lampstat06'][1] = 'on'  # Arc
        fitsdict['exptime'][1] = 30  # Arc
        fitsdict['lampstat01'][2] = 'on'  # Trace, pixel, slit flat
        fitsdict['lampstat01'][3] = 'on'  # Trace, pixel, slit flat
        fitsdict['exptime'][2] = 30  # flat
        fitsdict['exptime'][3] = 30  # flat
        fitsdict['ra'][4] = '05:06:36.6'  # Standard
        fitsdict['dec'][4] = '52:52:01.0'
        fitsdict['airmass'][4] = 1.2
        fitsdict['ra'][5] = '07:06:23.45'  # Random object
        fitsdict['dec'][5] = '+30:20:50.5'
        fitsdict['decker'] = ['0.5 arcsec'] * nfile

    # arrays
    for k in fitsdict.keys():
        fitsdict[k] = np.array(fitsdict[k])

    spectrograph = load_spectrograph(spectro_name)
    fitstbl = PypeItMetaData(spectrograph,
                             spectrograph.default_pypeit_par(),
                             data=fitsdict)
    fitstbl['instrume'] = spectro_name
    type_bits = np.zeros(len(fitstbl),
                         dtype=fitstbl.type_bitmask.minimum_dtype())

    # Image typing
    if not notype:
        if spectro_name == 'shane_kast_blue':
            #fitstbl['sci_ID'] = 1  # This links all the files to the science object
            type_bits[0] = fitstbl.type_bitmask.turn_on(type_bits[0],
                                                        flag='bias')
            type_bits[1] = fitstbl.type_bitmask.turn_on(type_bits[1],
                                                        flag='arc')
            type_bits[1] = fitstbl.type_bitmask.turn_on(type_bits[1],
                                                        flag='tilt')
            type_bits[2:4] = fitstbl.type_bitmask.turn_on(
                type_bits[2:4], flag=['pixelflat', 'trace', 'illumflat'])
            type_bits[4] = fitstbl.type_bitmask.turn_on(type_bits[4],
                                                        flag='standard')
            type_bits[5:] = fitstbl.type_bitmask.turn_on(type_bits[5:],
                                                         flag='science')
            fitstbl.set_frame_types(type_bits)
            # Calibration groups
            cfgs = fitstbl.unique_configurations(
                ignore_frames=['bias', 'dark'])
            fitstbl.set_configurations(cfgs)
            fitstbl.set_calibration_groups(global_frames=['bias', 'dark'])

    return fitstbl
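
# A minimal usage sketch for the helper above; the expected counts follow the
# frame typing applied for 'shane_kast_blue' and should be treated as
# illustrative:
_fitstbl = dummy_fitstbl(nfile=10, spectro_name='shane_kast_blue')
assert _fitstbl.find_frames('science').sum() == 5  # rows 5-9 are typed as science
assert _fitstbl.find_frames('bias').sum() == 1     # row 0 is typed as a bias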
Example No. 21
class PypeIt(object):
    """
    This class runs the primary calibration and extraction in PypeIt

    .. todo::
        Fill in list of attributes!

    Args:
        pypeit_file (:obj:`str`):
            PypeIt filename.
        verbosity (:obj:`int`, optional):
            Verbosity level of system output.  Can be:

                - 0: No output
                - 1: Minimal output (default)
                - 2: All output

        overwrite (:obj:`bool`, optional):
            Flag to overwrite any existing files/directories.
        reuse_masters (:obj:`bool`, optional):
            Reuse any pre-existing calibration files
        logname (:obj:`str`, optional):
            The name of an ascii log file with the details of the
            reduction.
        show (:obj:`bool`, optional):
            Show reduction steps via plots (which will block further
            execution until clicked on) and outputs to ginga. Requires
            remote control ginga session via ``ginga --modules=RC &``
        redux_path (:obj:`str`, optional):
            Override the reduction path in the PypeIt file (e.g., Notebook usage)

    Attributes:
        pypeit_file (:obj:`str`):
            Name of the pypeit file to read.  PypeIt files have a
            specific set of valid formats. A description can be found
            :ref:`pypeit_file`.
        fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the meta info

    """

    #    __metaclass__ = ABCMeta

    def __init__(self,
                 pypeit_file,
                 verbosity=2,
                 overwrite=True,
                 reuse_masters=False,
                 logname=None,
                 show=False,
                 redux_path=None):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups \
                = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name,
                                              ifile=data_files[0])
        msgs.info('Loaded spectrograph {0}'.format(
            self.spectrograph.spectrograph))

        # --------------------------------------------------------------
        # Get the full set of PypeIt parameters
        #   - Grab a science or standard file for configuration specific parameters
        scistd_file = None
        for idx, row in enumerate(usrdata):
            if ('science' in row['frametype']) or ('standard'
                                                   in row['frametype']):
                scistd_file = data_files[idx]
                break
        #   - Configuration specific parameters for the spectrograph
        if scistd_file is not None:
            msgs.info(
                'Setting configuration-specific parameters using {0}'.format(
                    os.path.split(scistd_file)[1]))
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(
            scistd_file).to_config()
        #   - Build the full set, merging with any user-provided
        #     parameters
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)
        msgs.info('Built full PypeIt parameter set.')

        # Check the output paths are ready
        if redux_path is not None:
            self.par['rdx']['redux_path'] = redux_path

        # TODO: Write the full parameter set here?
        # --------------------------------------------------------------

        # --------------------------------------------------------------
        # Build the meta data
        #   - Re-initialize based on the file data
        msgs.info('Compiling metadata')
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      files=data_files,
                                      usrdata=usrdata,
                                      strict=True)
        #   - Interpret automated or user-provided data from the PypeIt
        #   file
        self.fitstbl.finalize_usr_build(frametype, setups[0])
        # --------------------------------------------------------------
        #   - Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite

        # Currently the runtime argument determines the behavior for
        # reuse_masters.
        self.reuse_masters = reuse_masters
        self.show = show

        # Set paths
        if self.par['calibrations']['caldir'] == 'default':
            self.calibrations_path = os.path.join(
                self.par['rdx']['redux_path'], 'Masters')
        else:
            self.calibrations_path = self.par['calibrations']['caldir']

        # Report paths
        msgs.info('Setting reduction path to {0}'.format(
            self.par['rdx']['redux_path']))
        msgs.info('Master calibration data output to: {0}'.format(
            self.calibrations_path))
        msgs.info('Science data output to: {0}'.format(self.science_path))
        msgs.info('Quality assessment plots output to: {0}'.format(
            self.qa_path))
        # TODO: Is anything written to the qa dir or only to qa/PNGs?
        # Should we have separate calibration and science QA
        # directories?

        # Instantiate Calibrations class
        self.caliBrate \
            = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'],
                                                 self.spectrograph,
                                                 caldir=self.calibrations_path,
                                                 qadir=self.qa_path,
                                                 reuse_masters=self.reuse_masters,
                                                 show=self.show)
        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.frame = None
        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None

    @property
    def science_path(self):
        """Return the path to the science directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['scidir'])

    @property
    def qa_path(self):
        """Return the path to the top-level QA directory."""
        return os.path.join(self.par['rdx']['redux_path'],
                            self.par['rdx']['qadir'])

    def build_qa(self):
        """
        Generate QA wrappers
        """
        qa.gen_mf_html(self.pypeit_file, self.qa_path)
        qa.gen_exp_html()

    # TODO: This should go in a more relevant place
    def spec_output_file(self, frame, twod=False):
        """
        Return the path to the spectral output data file.
        
        Args:
            frame (:obj:`int`):
                Frame index from :attr:`fitstbl`.
            twod (:obj:`bool`):
                If True, return the path to the 2D output file; otherwise the 1D file.
        
        Returns:
            :obj:`str`: The path for the output file
        """
        return os.path.join(
            self.science_path,
            'spec{0}d_{1}.fits'.format('2' if twod else '1',
                                       self.fitstbl.construct_basename(frame)))

    def outfile_exists(self, frame):
        """
        Check whether the 2D outfile of a given frame already exists

        Args:
            frame (int): Frame index from fitstbl

        Returns:
            bool: True if the 2d file exists, False if it does not exist
        """
        return os.path.isfile(self.spec_output_file(frame, twod=True))

    def get_std_outfile(self, standard_frames):
        """
        Grab the output filename from an input list of standard_frame indices

        If more than one index is provided, the first is taken

        Args:
            standard_frames (list): List of indices corresponding to standard stars

        Returns:
            str: Full path to the standard spec1d output file
        """
        # TODO: Need to decide how to associate standards with
        # science frames in the case where there is more than one
        # standard associated with a given science frame.  Below, I
        # just use the first standard

        std_outfile = None
        std_frame = None if len(standard_frames) == 0 else standard_frames[0]
        # Prepare to load up standard?
        if std_frame is not None:
            std_outfile = self.spec_output_file(std_frame) \
                            if isinstance(std_frame, (int,np.integer)) else None
        if std_outfile is not None and not os.path.isfile(std_outfile):
            msgs.error('Could not find standard file: {0}'.format(std_outfile))
        return std_outfile

    def reduce_all(self):
        """
        Main driver of the entire reduction

        Calibration and extraction via a series of calls to reduce_exposure()

        """
        # Validate the parameter set
        required = [
            'rdx', 'calibrations', 'scienceframe', 'reduce', 'flexure',
            'fluxcalib'
        ]
        can_be_None = ['flexure', 'fluxcalib']
        self.par.validate_keys(required=required, can_be_None=can_be_None)

        self.tstart = time.time()

        # Find the standard frames
        is_standard = self.fitstbl.find_frames('standard')

        # Find the science frames
        is_science = self.fitstbl.find_frames('science')

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))

        # Iterate over each calibration group and reduce the standards
        for i in range(self.fitstbl.n_calib_groups):

            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the standard frames in this calibration group:
            grp_standards = frame_indx[is_standard & in_grp]

            # Reduce all the standard frames, loop on unique comb_id
            u_combid_std = np.unique(self.fitstbl['comb_id'][grp_standards])
            for j, comb_id in enumerate(u_combid_std):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    std_dict = self.reduce_exposure(frames,
                                                    bg_frames=bg_frames)
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], std_dict, self.basename)
                else:
                    msgs.info(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

        # Iterate over each calibration group again and reduce the science frames
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the science frames in this calibration group:
            grp_science = frame_indx[is_science & in_grp]
            # Associate standards (previously reduced above) for this setup
            std_outfile = self.get_std_outfile(frame_indx[is_standard])
            # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration
            science_basename = [None] * len(grp_science)
            # Loop on unique comb_id
            u_combid = np.unique(self.fitstbl['comb_id'][grp_science])
            for j, comb_id in enumerate(u_combid):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                # Find all frames whose comb_id matches the current frames bkg_id.
                bg_frames = np.where((self.fitstbl['comb_id'] ==
                                      self.fitstbl['bkg_id'][frames][0])
                                     & (self.fitstbl['comb_id'] >= 0))[0]
                # JFH changed the syntax below to that above, which allows frames to be used more than once
                # as a background image. The syntax below would require that we could somehow list multiple
                # numbers for the bkg_id which is impossible without a comma separated list
                #                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    sci_dict = self.reduce_exposure(frames,
                                                    bg_frames=bg_frames,
                                                    std_outfile=std_outfile)
                    science_basename[j] = self.basename
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], sci_dict, self.basename)
                else:
                    msgs.warn(
                        'Output file: {:s} already exists'.format(
                            self.fitstbl.construct_basename(frames[0])) +
                        '. Set overwrite=True to recreate and overwrite.')

            msgs.info('Finished calibration group {0}'.format(i))

        # Finish
        self.print_end_time()

    # This is a static method to allow for use in coadding script
    @staticmethod
    def select_detectors(detnum=None, ndet=1):
        """
        Return the 1-indexed list of detectors to reduce.

        Args:
            detnum (:obj:`int`, :obj:`list`, optional):
                One or more detectors to reduce.  If None, return the
                full list for the provided number of detectors (`ndet`).
            ndet (:obj:`int`, optional):
                The number of detectors for this instrument.  Only used
                if `detnum is None`.

        Returns:
            list:  List of detectors to be reduced

        """
        if detnum is None:
            return np.arange(1, ndet + 1).tolist()
        return [detnum] if isinstance(detnum, int) else detnum
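    # Illustrative sketch (not part of the pipeline): how select_detectors
    # expands the user's detector request into a 1-indexed list, assuming a
    # hypothetical 4-detector instrument.
    #   PypeIt.select_detectors(ndet=4)                  # -> [1, 2, 3, 4]
    #   PypeIt.select_detectors(detnum=2, ndet=4)        # -> [2]
    #   PypeIt.select_detectors(detnum=[1, 3], ndet=4)   # -> [1, 3]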

    def reduce_exposure(self, frames, bg_frames=None, std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the
                frames to reduce; multiple frames are combined.
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background.
            std_outfile (:obj:`str`, optional):
                File with a previously reduced standard spectrum from
                PypeIt.

        Returns:
            dict: The dictionary containing the primary outputs of
            extraction.

        """

        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            # TODO: Put this in a try/except block?
            ginga.clear_all()

        has_bg = bg_frames is not None and len(bg_frames) > 0

        # Is this an IR reduction?
        # TODO: Why specific to IR?
        self.ir_redux = has_bg

        # TODO: JFH Why does this need to be ordered?
        sci_dict = OrderedDict()  # This needs to be ordered
        sci_dict['meta'] = {}
        sci_dict['meta']['ir_redux'] = self.ir_redux

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(
            self.fitstbl['target'][frames[0]]) + msgs.newline()
        # TODO: Print these when the frames are actually combined,
        # backgrounds are used, etc?
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in frames:
            msgs_string += '{0:s}'.format(
                self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if has_bg:
            bg_msgs_string = ''
            for iframe in bg_frames:
                bg_msgs_string += '{0:s}'.format(
                    self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline(
            ) + 'Using background from frames:' + msgs.newline(
            ) + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        detectors = PypeIt.select_detectors(detnum=self.par['rdx']['detnum'],
                                            ndet=self.spectrograph.ndet)
        if len(detectors) != self.spectrograph.ndet:
            msgs.warn('Not reducing detectors: {0}'.format(' '.join([
                str(d) for d in set(np.arange(self.spectrograph.ndet)) -
                set(detectors)
            ])))

        # Loop on Detectors
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            sci_dict[self.det] = {}
            # Calibrate
            #TODO Is the right behavior to just use the first frame?
            self.caliBrate.set_config(frames[0], self.det,
                                      self.par['calibrations'])
            self.caliBrate.run_the_steps()
            # Extract
            # TODO: pass back the background frame, pass in background
            # files as an argument. extract one takes a file list as an
            # argument and instantiates science within
            sci_dict[self.det]['sciimg'], sci_dict[self.det]['sciivar'], \
                sci_dict[self.det]['skymodel'], sci_dict[self.det]['objmodel'], \
                sci_dict[self.det]['ivarmodel'], sci_dict[self.det]['outmask'], \
                sci_dict[self.det]['specobjs'], \
                        = self.extract_one(frames, self.det, bg_frames,
                                           std_outfile=std_outfile)
            # JFH TODO write out the background frame?

        # Return
        return sci_dict
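        # For reference, the dictionary assembled above has the layout
        #   sci_dict['meta']['ir_redux']                        -> bool
        #   sci_dict[det]['sciimg'], ['sciivar'], ['skymodel'],
        #   sci_dict[det]['objmodel'], ['ivarmodel'], ['outmask'], ['specobjs']
        # with one entry per reduced detector; save_exposure() passes this
        # structure to save.save_all() to write the output files.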

    def get_sci_metadata(self, frame, det):
        """
        Grab the meta data for a given science frame and specific detector

        Args:
            frame (int): Frame index
            det (int): Detector index

        Returns:
            5 objects are returned::
                - str: Object type;  science or standard
                - str: Setup string from master_key()
                - astropy.time.Time: Time of observation
                - str: Basename of the frame
                - str: Binning of the detector

        """

        # Set binning, obstime, basename, and objtype
        binning = self.fitstbl['binning'][frame]
        obstime = self.fitstbl.construct_obstime(frame)
        basename = self.fitstbl.construct_basename(frame, obstime=obstime)
        objtype = self.fitstbl['frametype'][frame]
        if 'science' in objtype:
            objtype_out = 'science'
        elif 'standard' in objtype:
            objtype_out = 'standard'
        else:
            msgs.error('Unrecognized objtype')
        setup = self.fitstbl.master_key(frame, det=det)
        return objtype_out, setup, obstime, basename, binning
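        # Illustrative example with hypothetical values: a call like
        #   objtype, setup, obstime, basename, binning = self.get_sci_metadata(0, det=1)
        # might yield objtype='science', binning='1,1', an astropy Time for
        # obstime, a basename built by construct_basename(), and a setup/master
        # key string from master_key() (exact formats depend on PypeItMetaData).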

    def get_std_trace(self, std_redux, det, std_outfile):
        """
        Returns the trace of the standard if it is applicable to the current reduction

        Args:
            std_redux (bool): True if the frame being reduced is itself a
                standard star, in which case no external trace is needed
            det (int): Detector index
            std_outfile (str): Filename for the standard star spec1d file

        Returns:
            ndarray or None: Trace of the standard star on input detector

        """
        if std_redux is False and std_outfile is not None:
            sobjs = specobjs.SpecObjs.from_fitsfile(std_outfile)
            # Does the detector match?
            # TODO Instrument-specific logic here could be implemented with the parset. For example,
            # for LRIS-B or LRIS-R we would use the standard from another detector
            this_det = sobjs.DET == det
            if np.any(this_det):
                sobjs_det = sobjs[this_det]
                sobjs_std = sobjs_det.get_std()
                # No standard extracted on this detector??
                if sobjs_std is None:
                    return None
                std_trace = sobjs_std.TRACE_SPAT
                # flatten the array if this is a multislit reduction
                if 'MultiSlit' in self.spectrograph.pypeline:
                    std_trace = std_trace.flatten()
                elif 'Echelle' in self.spectrograph.pypeline:
                    std_trace = std_trace.T
                else:
                    msgs.error('Unrecognized pypeline')
            else:
                std_trace = None
        else:
            std_trace = None

        return std_trace
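        # Note on the reshaping above (an assumption about how the traces are
        # stored): for MultiSlit reductions TRACE_SPAT holds the single standard
        # trace as a 2D (1, nspec) array, so flatten() returns it as 1D, while
        # for Echelle reductions the per-order traces are transposed so that
        # the spectral axis comes first.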

    def extract_one(self, frames, det, bg_frames, std_outfile=None):
        """
        Extract a single exposure/detector pair

        The calibrations (:attr:`caliBrate`) must have been configured and run
        for this detector prior to calling this method.

        Args:
            frames (list):
                List of frames to extract;  stacked if more than one is provided
            det (int):
            bg_frames (list):
                List of frames to use as the background
                Can be empty
            std_outfile (str, optional):

        Returns:
            seven objects are returned::
                - ndarray: Science image
                - ndarray: Science inverse variance image
                - ndarray: Model of the sky
                - ndarray: Model of the object
                - ndarray: Model of inverse variance
                - ndarray: Mask
                - :obj:`pypeit.specobjs.SpecObjs`: spectra

        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning = self.get_sci_metadata(
            frames[0], det)
        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype
        # Get the standard trace if need be
        std_trace = self.get_std_trace(self.std_redux, det, std_outfile)

        # Build Science image
        sci_files = self.fitstbl.frame_paths(frames)
        self.sciImg = scienceimage.build_from_file_list(
            self.spectrograph,
            det,
            self.par['scienceframe']['process'],
            self.caliBrate.msbpm,
            sci_files,
            self.caliBrate.msbias,
            self.caliBrate.mspixelflat,
            illum_flat=self.caliBrate.msillumflat)

        # Background Image?
        if len(bg_frames) > 0:
            bg_file_list = self.fitstbl.frame_paths(bg_frames)
            self.sciImg = self.sciImg - scienceimage.build_from_file_list(
                self.spectrograph,
                det,
                self.par['scienceframe']['process'],
                self.caliBrate.msbpm,
                bg_file_list,
                self.caliBrate.msbias,
                self.caliBrate.mspixelflat,
                illum_flat=self.caliBrate.msillumflat)
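        # In this difference-imaging case (e.g. near-IR A-B dithers) the
        # background exposures are processed with the same calibrations as the
        # science stack and subtracted before any sky modeling or extraction.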

        # Update mask for slitmask
        slitmask = pixels.tslits2mask(self.caliBrate.tslits_dict)
        self.sciImg.update_mask_slitmask(slitmask)

        # For QA on crash
        msgs.sciexp = self.sciImg

        # Instantiate Reduce object
        self.maskslits = self.caliBrate.tslits_dict['maskslits'].copy()
        # Required for pypeline specific object
        # TODO -- caliBrate should be replaced by the ~3 primary Objects needed
        #   once we have the data models in place.
        self.redux = reduce.instantiate_me(self.sciImg,
                                           self.spectrograph,
                                           self.par,
                                           self.caliBrate,
                                           maskslits=self.maskslits,
                                           ir_redux=self.ir_redux,
                                           std_redux=self.std_redux,
                                           objtype=self.objtype,
                                           setup=self.setup,
                                           show=self.show,
                                           det=det,
                                           binning=self.binning)
        # Show?
        if self.show:
            self.redux.show('image',
                            image=self.sciImg.image,
                            chname='processed',
                            slits=True,
                            clear=True)

        # Prep for manual extraction (if requested)
        manual_extract_dict = self.fitstbl.get_manual_extract(frames, det)

        self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs = self.redux.run(
            std_trace=std_trace,
            manual_extract_dict=manual_extract_dict,
            show_peaks=self.show,
            basename=self.basename,
            ra=self.fitstbl["ra"][frames[0]],
            dec=self.fitstbl["dec"][frames[0]],
            obstime=self.obstime)

        # Return
        return self.sciImg.image, self.sciImg.ivar, self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs

    # TODO: Why not use self.frame?
    def save_exposure(self, frame, sci_dict, basename):
        """
        Save the outputs from extraction for a given exposure

        Args:
            frame (:obj:`int`):
              0-indexed row in the metadata table with the frame that
              has been reduced.
            sci_dict (:obj:`dict`):
              Dictionary containing the primary outputs of extraction
            basename (:obj:`str`):
                The root name for the output file.

        Returns:
            None or SpecObjs:  All of the objects saved to disk

        """
        # TODO: Need some checks here that the exposure has been reduced

        # Determine the headers
        head1d = self.fitstbl[frame]
        # Need raw file header information
        rawfile = self.fitstbl.frame_paths(frame)
        head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext)
        refframe = 'pixel' if self.caliBrate.par['wavelengths']['reference'] == 'pixel' else \
            self.caliBrate.par['wavelengths']['frame']

        # Determine the paths/filenames
        save.save_all(sci_dict,
                      self.caliBrate.master_key_dict,
                      self.caliBrate.master_dir,
                      self.spectrograph,
                      head1d,
                      head2d,
                      self.science_path,
                      basename,
                      update_det=self.par['rdx']['detnum'],
                      binning=self.fitstbl['binning'][frame])

    def msgs_reset(self):
        """
        Reset the msgs object
        """

        # Reset the global logger
        msgs.reset(log=self.logname, verbosity=self.verbosity)
        msgs.pypeit_file = self.pypeit_file

    def print_end_time(self):
        """
        Print the elapsed time
        """
        # Capture the end time and print it to user
        tend = time.time()
        codetime = tend - self.tstart
        if codetime < 60.0:
            msgs.info('Execution time: {0:.2f}s'.format(codetime))
        elif codetime / 60.0 < 60.0:
            mns = int(codetime / 60.0)
            scs = codetime - 60.0 * mns
            msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs))
        else:
            hrs = int(codetime / 3600.0)
            mns = int(60.0 * (codetime / 3600.0 - hrs))
            scs = codetime - 60.0 * mns - 3600.0 * hrs
            msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format(
                hrs, mns, scs))
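        # Worked example of the formatting above: codetime = 3725.0 s gives
        # hrs = 1, mns = 2, scs = 5.0, i.e. 'Execution time: 1h 2m 5.00s'.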

    # TODO: Move this to fitstbl?
    def show_science(self):
        """
        Simple print of science frames
        """
        indx = self.fitstbl.find_frames('science')
        print(self.fitstbl[['target', 'ra', 'dec', 'exptime',
                            'dispname']][indx])

    def __repr__(self):
        # Generate sets string
        return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__,
                                               self.pypeit_file)
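
A minimal, illustrative driver for the class above (the .pypeit filename and import path are assumptions; the call sequence simply chains the public methods shown in these examples):

from pypeit.pypeit import PypeIt   # assumed import location

pypeit = PypeIt('my_reduction.pypeit', verbosity=2, reuse_masters=True,
                overwrite=True, logname='my_reduction.log', show=False)
pypeit.reduce_all()   # calibrate, then extract every standard and science frame
pypeit.build_qa()     # build the QA HTML wrappers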
Example #22
0
    def __init__(self,
                 pypeit_file,
                 verbosity=2,
                 overwrite=True,
                 reuse_masters=False,
                 logname=None,
                 show=False,
                 redux_path=None):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups \
                = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name,
                                              ifile=data_files[0])
        msgs.info('Loaded spectrograph {0}'.format(
            self.spectrograph.spectrograph))

        # --------------------------------------------------------------
        # Get the full set of PypeIt parameters
        #   - Grab a science or standard file for configuration specific parameters
        scistd_file = None
        for idx, row in enumerate(usrdata):
            if ('science' in row['frametype']) or ('standard'
                                                   in row['frametype']):
                scistd_file = data_files[idx]
                break
        #   - Configuration specific parameters for the spectrograph
        if scistd_file is not None:
            msgs.info(
                'Setting configuration-specific parameters using {0}'.format(
                    os.path.split(scistd_file)[1]))
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(
            scistd_file).to_config()
        #   - Build the full set, merging with any user-provided
        #     parameters
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                            merge_with=cfg_lines)
        msgs.info('Built full PypeIt parameter set.')

        # Check the output paths are ready
        if redux_path is not None:
            self.par['rdx']['redux_path'] = redux_path

        # TODO: Write the full parameter set here?
        # --------------------------------------------------------------

        # --------------------------------------------------------------
        # Build the meta data
        #   - Re-initialize based on the file data
        msgs.info('Compiling metadata')
        self.fitstbl = PypeItMetaData(self.spectrograph,
                                      self.par,
                                      files=data_files,
                                      usrdata=usrdata,
                                      strict=True)
        #   - Interpret automated or user-provided data from the PypeIt
        #   file
        self.fitstbl.finalize_usr_build(frametype, setups[0])
        # --------------------------------------------------------------
        #   - Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite

        # Currently the runtime argument determines the behavior for
        # reuse_masters.
        self.reuse_masters = reuse_masters
        self.show = show

        # Set paths
        if self.par['calibrations']['caldir'] == 'default':
            self.calibrations_path = os.path.join(
                self.par['rdx']['redux_path'], 'Masters')
        else:
            self.calibrations_path = self.par['calibrations']['caldir']

        # Report paths
        msgs.info('Setting reduction path to {0}'.format(
            self.par['rdx']['redux_path']))
        msgs.info('Master calibration data output to: {0}'.format(
            self.calibrations_path))
        msgs.info('Science data output to: {0}'.format(self.science_path))
        msgs.info('Quality assessment plots output to: {0}'.format(
            self.qa_path))
        # TODO: Is anything written to the qa dir or only to qa/PNGs?
        # Should we have separate calibration and science QA
        # directories?

        # Instantiate Calibrations class
        self.caliBrate \
            = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'],
                                                 self.spectrograph,
                                                 caldir=self.calibrations_path,
                                                 qadir=self.qa_path,
                                                 reuse_masters=self.reuse_masters,
                                                 show=self.show)
        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.frame = None
        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None
Example #23
0
class PypeIt(object):
    """
    This class runs the primary calibration and extraction in PypeIt

    Args:
        pypeit_file (:obj:`str`):  PypeIt filename
        verbosity (:obj:`int`, optional):
            Verbosity level of system output.  Can be::
                - 0: No output
                - 1: Minimal output
                - 2: All output (default)
        overwrite (:obj:`bool`, optional):
            Flag to overwrite any existing files/directories.
        reuse_masters (bool, optional): Reuse any pre-existing calibration files
        logname (:obj:`str`, optional):
            The name of an ascii log file with the details of the
            reduction.
        redux_path (:obj:`str`, optional):
            Override the reduction path in the PypeIt file (e.g., for Notebook usage)
        show (:obj:`bool`, optional):
            Show reduction steps via plots (which will block further
            execution until clicked on) and outputs to ginga. Requires
            remote control ginga session via "ginga --modules=RC &"

    Attributes:
        pypeit_file (:obj:`str`):
            Name of the pypeit file to read.  PypeIt files have a specific
            set of valid formats. A description can be found `here`_
            (include doc link).
        fitstbl (:obj:`pypeit.metadata.PypeItMetaData`): holds the metadata
    """
#    __metaclass__ = ABCMeta

    def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None,
                 show=False, redux_path=None):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)

        # Par
        # Defaults
        spectrograph_def_par = self.spectrograph.default_pypeit_par()
        # Grab a science file for configuration specific parameters
        sci_file = None
        for idx, row in enumerate(usrdata):
            if 'science' in row['frametype']:
                sci_file = data_files[idx]
                break

        # Set
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(spectrograph_def_par, sci_file).to_config()
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

        # Fitstbl
        self.fitstbl = PypeItMetaData(self.spectrograph, self.par, file_list=data_files,
                                      usrdata=usrdata, strict=True)

        # The following could be put in a prepare_to_run() method in PypeItMetaData
        if 'setup' not in self.fitstbl.keys():
            self.fitstbl['setup'] = setups[0]
        self.fitstbl.get_frame_types(user=frametype)  # This sets them using the user inputs
        self.fitstbl.set_defaults()  # Only does something if values not set in PypeIt file
        self.fitstbl._set_calib_group_bits()
        self.fitstbl._check_calib_groups()
        # Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite
        # Currently the runtime argument determines the behavior for reuse_masters. There is also a reuse_masters
        # parameter in the parset but it is currently ignored.
        self.reuse_masters=reuse_masters
        self.show = show

        # Make the output directories
        self.par['rdx']['redux_path'] = os.getcwd() if redux_path is None else redux_path
        msgs.info("Setting reduction path to {:s}".format(self.par['rdx']['redux_path']))
        paths.make_dirs(self.spectrograph.spectrograph, self.par['calibrations']['caldir'],
                        self.par['rdx']['scidir'], self.par['rdx']['qadir'],
                        overwrite=self.overwrite, redux_path=self.par['rdx']['redux_path'])

        # Instantiate Calibrations class
        self.caliBrate \
            = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], self.spectrograph,
                                                 redux_path=self.par['rdx']['redux_path'],
                                                 reuse_masters=self.reuse_masters,
                                                 save_masters=True, write_qa=True,
                                                 show=self.show)
        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.frame = None
        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None

    def build_qa(self):
        """
        Generate QA wrappers
        """
        qa.gen_mf_html(self.pypeit_file)
        qa.gen_exp_html()

    def outfile_exists(self, frame):
        """
        Check whether the 2D outfile of a given frame already exists

        Args:
            frame (int): Frame index from fitstbl

        Returns:
            bool: True if the 2D output file exists, False otherwise
        """
        # Check if the 2d output file exists
        scidir = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'])
        basename = self.fitstbl.construct_basename(frame)
        outfile = scidir + '/spec2d_{:s}.fits'.format(basename)
        return os.path.isfile(outfile)
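        # Illustrative path construction (hypothetical values): with
        # redux_path='/data/redux', scidir='Science', and basename 'frame0001',
        # the file checked above is '/data/redux/Science/spec2d_frame0001.fits'.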

    def get_std_outfile(self, standard_frames):
        """
        Grab the output filename from an input list of standard_frame indices

        If more than one index is provided, the first is taken

        Args:
            standard_frames (list): List of indices corresponding to standard stars

        Returns:
            str: Full path to the standard spec1d output file

        """
        # TODO: Need to decide how to associate standards with
        # science frames in the case where there is more than one
        # standard associated with a given science frame.  Below, I
        # just use the first standard

        std_outfile = None
        std_frame = None if len(standard_frames) == 0 else standard_frames[0]
        # Prepare to load up standard?
        if std_frame is not None:
            std_outfile = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'],
            'spec1d_{:s}.fits'.format(self.fitstbl.construct_basename(std_frame))) \
            if isinstance(std_frame, (int,np.integer)) else None

        if std_outfile is not None and not os.path.isfile(std_outfile):
            msgs.error('Could not find standard file: {0}'.format(std_outfile))

        return std_outfile
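        # Analogously to outfile_exists() above, the standard's spec1d file is
        # expected at <redux_path>/<scidir>/spec1d_<basename>.fits (values here
        # are hypothetical; the basename comes from construct_basename()).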

    def reduce_all(self):
        """
        Main driver of the entire reduction

        Calibration and extraction via a series of calls to reduce_exposure()

        """
        # Validate the parameter set
        required = ['rdx', 'calibrations', 'scienceframe', 'scienceimage', 'flexure', 'fluxcalib']
        can_be_None = ['flexure', 'fluxcalib']
        self.par.validate_keys(required=required, can_be_None=can_be_None)

        self.tstart = time.time()

        # Find the standard frames
        is_standard = self.fitstbl.find_frames('standard')

        # Find the science frames
        is_science = self.fitstbl.find_frames('science')

        # Frame indices
        frame_indx = np.arange(len(self.fitstbl))

        # Iterate over each calibration group and reduce the standards
        for i in range(self.fitstbl.n_calib_groups):

            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the standard frames in this calibration group:
            grp_standards = frame_indx[is_standard & in_grp]

            # Reduce all the standard frames, loop on unique comb_id
            u_combid_std= np.unique(self.fitstbl['comb_id'][grp_standards])
            for j, comb_id in enumerate(u_combid_std):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    std_dict = self.reduce_exposure(frames, bg_frames=bg_frames)
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], std_dict, self.basename)
                else:
                    msgs.info('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) +
                              '. Set overwrite=True to recreate and overwrite.')

        # Iterate over each calibration group again and reduce the science frames
        for i in range(self.fitstbl.n_calib_groups):
            # Find all the frames in this calibration group
            in_grp = self.fitstbl.find_calib_group(i)

            # Find the indices of the science frames in this calibration group:
            grp_science = frame_indx[is_science & in_grp]
            # Associate standards (previously reduced above) for this setup
            std_outfile = self.get_std_outfile(frame_indx[is_standard])
            # Reduce all the science frames; keep the basenames of the science frames for use in flux calibration
            science_basename = [None]*len(grp_science)
            # Loop on unique comb_id
            u_combid = np.unique(self.fitstbl['comb_id'][grp_science])
            for j, comb_id in enumerate(u_combid):
                frames = np.where(self.fitstbl['comb_id'] == comb_id)[0]
                bg_frames = np.where(self.fitstbl['bkg_id'] == comb_id)[0]
                if not self.outfile_exists(frames[0]) or self.overwrite:
                    sci_dict = self.reduce_exposure(frames, bg_frames=bg_frames, std_outfile=std_outfile)
                    science_basename[j] = self.basename
                    # TODO come up with sensible naming convention for save_exposure for combined files
                    self.save_exposure(frames[0], sci_dict, self.basename)
                else:
                    msgs.warn('Output file: {:s} already exists'.format(self.fitstbl.construct_basename(frames[0])) +
                              '. Set overwrite=True to recreate and overwrite.')

            msgs.info('Finished calibration group {0}'.format(i))

        # Finish
        self.print_end_time()


    def select_detectors(self):
        """
        Return the 1-indexed list of detectors to reduce.

        Returns:
            list:  List of detectors to be reduced

        """
        if self.par['rdx']['detnum'] is None:
            return np.arange(self.spectrograph.ndet)+1
        return [self.par['rdx']['detnum']] if isinstance(self.par['rdx']['detnum'], int) \
                    else self.par['rdx']['detnum']

    def reduce_exposure(self, frames, bg_frames=[], std_outfile=None):
        """
        Reduce a single exposure

        Args:
            frames (:obj:`list`):
                List of 0-indexed rows in :attr:`fitstbl` with the
                frames to reduce; multiple frames are combined
            bg_frames (:obj:`list`, optional):
                List of frame indices for the background
            std_outfile (:obj:`str`, optional):
                The name of a file with a previously PypeIt-reduced standard spectrum.

        Returns:
            dict: The dictionary containing the primary outputs of extraction
        """
        # if show is set, clear the ginga channels at the start of each new sci_ID
        if self.show:
            ginga.clear_all()

        # Save the frame
        self.frames = frames
        self.bg_frames = bg_frames
        # Is this an IR reduction?
        self.ir_redux = len(bg_frames) > 0

        # JFH Why does this need to be ordered?
        sci_dict = OrderedDict()  # This needs to be ordered
        sci_dict['meta'] = {}
        sci_dict['meta']['vel_corr'] = 0.
        sci_dict['meta']['ir_redux'] = self.ir_redux

        # Print status message
        msgs_string = 'Reducing target {:s}'.format(self.fitstbl['target'][self.frames[0]]) + msgs.newline()
        msgs_string += 'Combining frames:' + msgs.newline()
        for iframe in self.frames:
            msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
        msgs.info(msgs_string)
        if len(bg_frames) > 0:
            bg_msgs_string = ''
            for iframe in self.bg_frames:
                bg_msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
            bg_msgs_string = msgs.newline() + 'Using background from frames:' + msgs.newline() + bg_msgs_string
            msgs.info(bg_msgs_string)

        # Find the detectors to reduce
        detectors = self.select_detectors()
        if len(detectors) != self.spectrograph.ndet:
            msgs.warn('Not reducing detectors: {0}'.format(' '.join([ str(d) for d in 
                                set(np.arange(self.spectrograph.ndet))-set(detectors)])))

        # Loop on Detectors
        for self.det in detectors:
            msgs.info("Working on detector {0}".format(self.det))
            sci_dict[self.det] = {}
            # Calibrate
            #TODO Is the right behavior to just use the first frame?
            self.caliBrate.set_config(self.frames[0], self.det, self.par['calibrations'])
            self.caliBrate.run_the_steps()
            # Extract
            # TODO: pass back the background frame, pass in background
            # files as an argument. extract one takes a file list as an
            # argument and instantiates science within
            sci_dict[self.det]['sciimg'], sci_dict[self.det]['sciivar'], sci_dict[self.det]['skymodel'], \
                sci_dict[self.det]['objmodel'], sci_dict[self.det]['ivarmodel'], sci_dict[self.det]['outmask'], \
                sci_dict[self.det]['specobjs'], vel_corr \
                    = self.extract_one(self.frames, self.det, bg_frames = self.bg_frames, std_outfile = std_outfile)
            if vel_corr is not None:
                sci_dict['meta']['vel_corr'] = vel_corr

            # JFH TODO write out the background frame?

        # Return
        return sci_dict

    def flexure_correct(self, sobjs, maskslits):
        """
        Correct for flexure

        Spectra are modified in place (wavelengths are shifted)

        Args:
            sobjs (SpecObjs):
            maskslits (ndarray): Mask of SpecObjs

        """

        if self.par['flexure']['method'] != 'skip':
            flex_list = wave.flexure_obj(sobjs, maskslits, self.par['flexure']['method'],
                                         self.par['flexure']['spectrum'],
                                         mxshft=self.par['flexure']['maxshift'])
            # QA
            wave.flexure_qa(sobjs, maskslits, self.basename, self.det, flex_list,
                            out_dir=self.par['rdx']['redux_path'])
        else:
            msgs.info('Skipping flexure correction.')

    def helio_correct(self, sobjs, maskslits, frame, obstime):
        """
        Perform a heliocentric correction on a set of spectra

        Args:
            sobjs (pypeit.specobjs.SpecObjs): Spectra
            maskslits (ndarray): Slits that are masked
            frame (int): Frame to use for meta info
            obstime (astropy.time.Time):

        Returns:
            astropy.units.Quantity: Velocity correction in km/s

        """
        # Helio, correct Earth's motion
        if (self.caliBrate.par['wavelengths']['frame'] in ['heliocentric', 'barycentric']) \
                and (self.caliBrate.par['wavelengths']['reference'] != 'pixel'):
            # TODO change this keyword to refframe instead of frame
            msgs.info("Performing a {0} correction".format(self.caliBrate.par['wavelengths']['frame']))
            vel, vel_corr = wave.geomotion_correct(sobjs, maskslits, self.fitstbl, frame, obstime,
                                                   self.spectrograph.telescope['longitude'],
                                                   self.spectrograph.telescope['latitude'],
                                                   self.spectrograph.telescope['elevation'],
                                                   self.caliBrate.par['wavelengths']['frame'])
        else:
            msgs.info('A wavelength reference-frame correction will not be performed.')
            vel_corr = None

        return vel_corr

    def get_sci_metadata(self, frame, det):
        """
        Grab the meta data for a given science frame and specific detector

        Args:
            frame (int): Frame index
            det (int): Detector index

        Returns:
            5 objects are returned::
                - str: Object type;  science or standard
                - str: Setup string from master_key()
                - astropy.time.Time: Time of observation
                - str: Basename of the frame
                - str: Binning of the detector

        """

        # Set binning, obstime, basename, and objtype
        binning = self.fitstbl['binning'][frame]
        obstime  = self.fitstbl.construct_obstime(frame)
        basename = self.fitstbl.construct_basename(frame, obstime=obstime)
        objtype  = self.fitstbl['frametype'][frame]
        if 'science' in objtype:
            objtype_out = 'science'
        elif 'standard' in objtype:
            objtype_out = 'standard'
        else:
            msgs.error('Unrecognized objtype')
        setup = self.fitstbl.master_key(frame, det=det)
        return objtype_out, setup, obstime, basename, binning

    def get_std_trace(self, std_redux, det, std_outfile):
        """
        Returns the trace of the standard if it is applicable to the current reduction

        Args:
            std_redux (bool): True if the frame being reduced is itself a
                standard star, in which case no external trace is needed
            det (int): Detector index
            std_outfile (str): Filename for the standard star spec1d file

        Returns:
            ndarray or None: Trace of the standard star on input detector

        """
        if std_redux is False and std_outfile is not None:
            sobjs, hdr_std = load.load_specobjs(std_outfile)
            # Does the detector match?
            # TODO Instrument-specific logic here could be implemented with the parset. For example,
            # for LRIS-B or LRIS-R we would use the standard from another detector
            this_det = sobjs.det == det
            if np.any(this_det):
                sobjs_det = sobjs[this_det]
                sobjs_std = sobjs_det.get_std()
                std_trace = sobjs_std.trace_spat
                # flatten the array if this is a multislit reduction
                if 'MultiSlit' in self.spectrograph.pypeline:
                    std_trace = std_trace.flatten()
                elif 'Echelle' in self.spectrograph.pypeline:
                    std_trace = std_trace.T
                else:
                    msgs.error('Unrecognized pypeline')
            else:
                std_trace = None
        else:
            std_trace = None

        return std_trace

    def extract_one(self, frames, det, bg_frames=[], std_outfile=None):
        """
        Extract a single exposure/detector pair

        The calibrations (:attr:`caliBrate`) must have been configured and run
        for this detector prior to calling this method.

        Args:
            frames (list):  List of frames to extract;  stacked if more than one is provided
            det (int):
            bg_frames (list, optional): List of frames to use as the background
            std_outfile (str, optional):

        Returns:
            eight objects are returned::
                - ndarray: Science image
                - ndarray: Science inverse variance image
                - ndarray: Model of the sky
                - ndarray: Model of the object
                - ndarray: Model of inverse variance
                - ndarray: Mask
                - :obj:`pypeit.specobjs.SpecObjs`: spectra
                - astropy.units.Quantity: velocity correction

        """
        # Grab some meta-data needed for the reduction from the fitstbl
        self.objtype, self.setup, self.obstime, self.basename, self.binning = self.get_sci_metadata(frames[0], det)
        # Is this a standard star?
        self.std_redux = 'standard' in self.objtype
        # Get the standard trace if need be
        std_trace = self.get_std_trace(self.std_redux, det, std_outfile)
        # Instantiate ScienceImage for the files we will reduce
        sci_files = self.fitstbl.frame_paths(frames)
        self.sciI = scienceimage.ScienceImage(self.spectrograph, sci_files,
                                              bg_file_list=self.fitstbl.frame_paths(bg_frames),
                                              ir_redux = self.ir_redux,
                                              par=self.par['scienceframe'],
                                              det=det,
                                              binning=self.binning)
        # For QA on crash.
        msgs.sciexp = self.sciI

        # Process images (includes inverse variance image, rn2 image, and CR mask)
        self.sciimg, self.sciivar, self.rn2img, self.mask, self.crmask = \
            self.sciI.proc(self.caliBrate.msbias, self.caliBrate.mspixflatnrm.copy(),
                           self.caliBrate.msbpm, illum_flat=self.caliBrate.msillumflat,
                           show=self.show)
        # Object finding, first pass on frame without sky subtraction
        self.maskslits = self.caliBrate.maskslits.copy()

        self.redux = reduce.instantiate_me(self.spectrograph, self.caliBrate.tslits_dict,
                                           self.mask, self.par,
                                           ir_redux = self.ir_redux,
                                           objtype=self.objtype, setup=self.setup,
                                           det=det, binning=self.binning)

        # Prep for manual extraction (if requested)
        manual_extract_dict = self.fitstbl.get_manual_extract(frames, det)

        # Do one iteration of object finding, and sky subtract to get initial sky model
        self.sobjs_obj, self.nobj, skymask_init = \
            self.redux.find_objects(self.sciimg, self.sciivar, std=self.std_redux, ir_redux=self.ir_redux,
                                    std_trace=std_trace,maskslits=self.maskslits,
                                    show=self.show & (not self.std_redux),
                                    manual_extract_dict=manual_extract_dict)

        # Global sky subtraction, first pass. Uses skymask from object finding step above
        self.initial_sky = \
            self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], skymask=skymask_init,
                                    std=self.std_redux, maskslits=self.maskslits, show=self.show)

        if not self.std_redux:
            # Object finding, second pass on frame *with* sky subtraction. Show here if requested
            self.sobjs_obj, self.nobj, self.skymask = \
                self.redux.find_objects(self.sciimg - self.initial_sky, self.sciivar, std=self.std_redux, ir_redux=self.ir_redux,
                                  std_trace=std_trace,maskslits=self.maskslits,show=self.show,
                                        manual_extract_dict=manual_extract_dict)

        # If there are objects, do 2nd round of global_skysub, local_skysub_extract, flexure, geo_motion
        if self.nobj > 0:
            # Global sky subtraction second pass. Uses skymask from object finding
            self.global_sky = self.initial_sky if self.std_redux else \
                self.redux.global_skysub(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'],
                skymask=self.skymask, maskslits=self.maskslits, show=self.show)

            self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs = \
            self.redux.local_skysub_extract(self.sciimg, self.sciivar, self.caliBrate.tilts_dict['tilts'], self.caliBrate.mswave,
                                            self.global_sky, self.rn2img, self.sobjs_obj,
                                            model_noise=(not self.ir_redux),std = self.std_redux,
                                            maskslits=self.maskslits, show_profile=self.show,show=self.show)

            # Purge out the negative objects if this was a near-IR reduction.
            # TODO should we move this purge call to local_skysub_extract??
            if self.ir_redux:
                self.sobjs.purge_neg()

            # Flexure correction if this is not a standard star
            if not self.std_redux:
                self.redux.flexure_correct(self.sobjs, self.basename)

            # Grab coord
            radec = ltu.radec_to_coord((self.fitstbl["ra"][frames[0]], self.fitstbl["dec"][frames[0]]))
            self.vel_corr = self.redux.helio_correct(self.sobjs, radec, self.obstime)

        else:
            # Print status message
            msgs_string = 'No objects to extract for target {:s}'.format(self.fitstbl['target'][frames[0]]) + msgs.newline()
            msgs_string += 'On frames:' + msgs.newline()
            for iframe in frames:
                msgs_string += '{0:s}'.format(self.fitstbl['filename'][iframe]) + msgs.newline()
            msgs.warn(msgs_string)
            # set to first pass global sky
            self.skymodel = self.initial_sky
            self.objmodel = np.zeros_like(self.sciimg)
            # Set to sciivar. Could create a model but what is the point?
            self.ivarmodel = np.copy(self.sciivar)
            # Set to the initial mask in case no objects were found
            self.outmask = self.redux.mask
            # empty specobjs object from object finding
            if self.ir_redux:
                self.sobjs_obj.purge_neg()
            self.sobjs = self.sobjs_obj
            self.vel_corr = None

        return self.sciimg, self.sciivar, self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs, self.vel_corr

    # TODO: Why not use self.frame?
    def save_exposure(self, frame, sci_dict, basename):
        """
        Save the outputs from extraction for a given exposure

        Args:
            frame (:obj:`int`):
              0-indexed row in the metadata table with the frame that
              has been reduced.
            sci_dict (:obj:`dict`):
              Dictionary containing the primary outputs of extraction
            basename (:obj:`str`):
                The root name for the output file.

        Returns:
            None or SpecObjs:  All of the objects saved to disk

        """
        # TODO: Need some checks here that the exposure has been reduced

        # Determine the headers
        head1d = self.fitstbl[frame]
        # Need raw file header information
        rawfile = self.fitstbl.frame_paths(frame)
        head2d = fits.getheader(rawfile, ext=self.spectrograph.primary_hdrext,)
        refframe = 'pixel' if self.caliBrate.par['wavelengths']['reference'] == 'pixel' else \
            self.caliBrate.par['wavelengths']['frame']

        # Determine the paths/filenames
        scipath = os.path.join(self.par['rdx']['redux_path'], self.par['rdx']['scidir'])

        save.save_all(sci_dict, self.caliBrate.master_key_dict, self.caliBrate.master_dir, self.spectrograph,
                      head1d, head2d, scipath, basename, refframe=refframe,
                      update_det=self.par['rdx']['detnum'], binning=self.fitstbl['binning'][frame])

        return



    def msgs_reset(self):
        """
        Reset the msgs object
        """

        # Reset the global logger
        msgs.reset(log=self.logname, verbosity=self.verbosity)
        msgs.pypeit_file = self.pypeit_file

    def print_end_time(self):
        """
        Print the elapsed time
        """
        # Capture the end time and print it to user
        tend = time.time()
        codetime = tend-self.tstart
        if codetime < 60.0:
            msgs.info('Execution time: {0:.2f}s'.format(codetime))
        elif codetime/60.0 < 60.0:
            mns = int(codetime/60.0)
            scs = codetime - 60.0*mns
            msgs.info('Execution time: {0:d}m {1:.2f}s'.format(mns, scs))
        else:
            hrs = int(codetime/3600.0)
            mns = int(60.0*(codetime/3600.0 - hrs))
            scs = codetime - 60.0*mns - 3600.0*hrs
            msgs.info('Execution time: {0:d}h {1:d}m {2:.2f}s'.format(hrs, mns, scs))

    # TODO: Move this to fitstbl?
    def show_science(self):
        """
        Simple print of science frames
        """
        indx = self.fitstbl.find_frames('science')
        print(self.fitstbl[['target','ra','dec','exptime','dispname']][indx])

    def __repr__(self):
        # Generate sets string
        return '<{:s}: pypeit_file={}>'.format(self.__class__.__name__, self.pypeit_file)
Example #24
0
    def __init__(self, pypeit_file, verbosity=2, overwrite=True, reuse_masters=False, logname=None,
                 show=False, redux_path=None):

        # Load
        cfg_lines, data_files, frametype, usrdata, setups = parse_pypeit_file(pypeit_file, runtime=True)
        self.pypeit_file = pypeit_file

        # Spectrograph
        cfg = ConfigObj(cfg_lines)
        spectrograph_name = cfg['rdx']['spectrograph']
        self.spectrograph = load_spectrograph(spectrograph_name)

        # Par
        # Defaults
        spectrograph_def_par = self.spectrograph.default_pypeit_par()
        # Grab a science file for configuration specific parameters
        sci_file = None
        for idx, row in enumerate(usrdata):
            if 'science' in row['frametype']:
                sci_file = data_files[idx]
                break

        # Set
        spectrograph_cfg_lines = self.spectrograph.config_specific_par(spectrograph_def_par, sci_file).to_config()
        self.par = PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines, merge_with=cfg_lines)

        # Fitstbl
        self.fitstbl = PypeItMetaData(self.spectrograph, self.par, file_list=data_files,
                                      usrdata=usrdata, strict=True)

        # The following could be put in a prepare_to_run() method in PypeItMetaData
        if 'setup' not in self.fitstbl.keys():
            self.fitstbl['setup'] = setups[0]
        self.fitstbl.get_frame_types(user=frametype)  # This sets them using the user inputs
        self.fitstbl.set_defaults()  # Only does something if values not set in PypeIt file
        self.fitstbl._set_calib_group_bits()
        self.fitstbl._check_calib_groups()
        # Write .calib file (For QA naming amongst other things)
        calib_file = pypeit_file.replace('.pypeit', '.calib')
        self.fitstbl.write_calib(calib_file)

        # Other Internals
        self.logname = logname
        self.overwrite = overwrite
        # Currently the runtime argument determines the behavior for reuse_masters. There is also a reuse_masters
        # parameter in the parset but it is currently ignored.
        self.reuse_masters=reuse_masters
        self.show = show

        # Make the output directories
        self.par['rdx']['redux_path'] = os.getcwd() if redux_path is None else redux_path
        msgs.info("Setting reduction path to {:s}".format(self.par['rdx']['redux_path']))
        paths.make_dirs(self.spectrograph.spectrograph, self.par['calibrations']['caldir'],
                        self.par['rdx']['scidir'], self.par['rdx']['qadir'],
                        overwrite=self.overwrite, redux_path=self.par['rdx']['redux_path'])

        # Instantiate Calibrations class
        self.caliBrate \
            = calibrations.MultiSlitCalibrations(self.fitstbl, self.par['calibrations'], self.spectrograph,
                                                 redux_path=self.par['rdx']['redux_path'],
                                                 reuse_masters=self.reuse_masters,
                                                 save_masters=True, write_qa=True,
                                                 show=self.show)
        # Init
        self.verbosity = verbosity
        # TODO: I don't think this is ever used

        self.frame = None
        self.det = None

        self.tstart = None
        self.basename = None
        self.sciI = None
        self.obstime = None