Example #1
    def save_master(self, data, outfile=None, raw_files=None, steps=None, overwrite=True, extensions=None, names=None):
        """

        Args:
            data (dict): wv_calib dict
            outfile (str, optional):
            raw_files:  Not used
            steps:  Not used
            overwrite:  Not used
            extensions:  Not used
            names:  Not used

        Returns:

        """

        _outfile = self.ms_name if outfile is None else outfile
        if os.path.exists(_outfile) and (not overwrite):
            msgs.warn("This file already exists.  Use overwrite=True to overwrite it")
            return
        #
        msgs.info("Saving master {0:s} frame as:".format(self.frametype) + msgs.newline() + _outfile)
        # Wavelength fit(s)

        # jsonify has the annoying property that it modifies the objects it
        # processes (e.g., it converts lists to arrays), so work on a deep copy
        data_for_json = copy.deepcopy(data)
        gddict = linetools.utils.jsonify(data_for_json)
        linetools.utils.savejson(_outfile, gddict, easy_to_read=True, overwrite=True)
        # Finish
        msgs.info("Master {0:s} frame saved successfully:".format(self.frametype) + msgs.newline() + _outfile)

        return
Example #2
def key_list_allowed(v, allowed):
    """ Check that a keyword argument is a list. Set the
    appropriate type of the list based on the supplied values.
    Then, check that each value in the list is also in the
    supplied 'allowed' list.

    Parameters
    ----------
    v : str
      value of a keyword argument
    allowed : list
      list of allowed values that v can take

    Returns
    -------
    v : list
      A value used by the settings dictionary
    """
    ll = inspect.currentframe().f_back.f_code.co_name.split('_')
    func_name = "'" + " ".join(ll) + "'"
    v = key_list(v)
    for item in v:
        if item not in allowed:
            msgs.error("The allowed list does not include: {0:s}".format(item) +
                       msgs.newline() + "Please choose one of the following:" +
                       msgs.newline() + ", ".join(allowed) + msgs.newline() +
                       "for the argument of {0:s}".format(func_name))
    return v
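A minimal, self-contained sketch of the caller-introspection trick used above, with plain exceptions standing in for msgs and an invented setting function name:

import inspect

def check_allowed(v, allowed):
    # Look one frame back to recover the name of the calling setting
    # function, exactly as key_list_allowed does above.
    func_name = " ".join(inspect.currentframe().f_back.f_code.co_name.split("_"))
    if v not in allowed:
        raise ValueError("The argument of '{0:s}' must be one of: {1:s}".format(
            func_name, ", ".join(allowed)))
    return v

def reduce_flatfield_method(v):
    # Hypothetical setting function; its name appears in the error message.
    return check_allowed(v, ["polyscan", "bspline"])

print(reduce_flatfield_method("bspline"))  # -> bspline
try:
    reduce_flatfield_method("spline")
except ValueError as err:
    print(err)  # mentions 'reduce flatfield method'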
Example #3
def check_deprecated(v, deprecated, upper=False):
    """ Check if a keyword argument is deprecated.

    Parameters
    ----------
    v : str
      value of a keyword argument
    deprecated : list
      list of deprecated values that v might be
    upper : bool (optional)
      If True, v is converted to uppercase before the comparison
      (i.e. the deprecated list is expected to contain only uppercase
      strings); if False, lowercase is assumed.

    Returns
    -------
    None
      Nothing is returned; an error is raised if v is deprecated.
    """
    ll = inspect.currentframe().f_back.f_code.co_name.split('_')
    func_name = "'" + " ".join(ll) + "'"
    if upper:
        v = v.upper()
    else:
        v = v.lower()
    if v in deprecated:
        msgs.error("The argument of {0:s} is deprecated.".format(func_name) +
                   msgs.newline() + "Please choose one of the following:" +
                   msgs.newline() + ", ".join(deprecated))
    return
Example #4
def read_coaddfile(ifile):
    """
    Read a ``PypeIt`` coadd1d file, akin to a standard PypeIt file.

    The top is a config block that sets ParSet parameters.  The name of the
    spectrograph is required.

    Args:
        ifile (:obj:`str`):
            Name of the coadd file

    Returns:
        :obj:`tuple`:  Three objects are returned: a :obj:`list` with the
        configuration entries used to modify the relevant
        :class:`~pypeit.par.parset.ParSet` parameters, a :obj:`list` the names
        of spec1d files to be coadded, and a :obj:`list` with the object IDs
        aligned with each of the spec1d files.
    """
    # Read in the pypeit reduction file
    msgs.info('Loading the coadd1d file')
    lines = par.util._read_pypeit_file_lines(ifile)
    is_config = np.ones(len(lines), dtype=bool)

    # Parse the coadd1d block
    spec1dfiles = []
    objids_in = []
    s, e = par.util._find_pypeit_block(lines, 'coadd1d')
    if s >= 0 and e < 0:
        msgs.error("Missing 'coadd1d end' in {0}".format(ifile))
    elif (s < 0) or (s==e):
        msgs.error("Missing coadd1d read or [coadd1d] block in in {0}. Check the input format for the .coadd1d file".format(ifile))
    else:
        for ctr, line in enumerate(lines[s:e]):
            prs = line.split(' ')
            spec1dfiles.append(prs[0])
            if ctr == 0 and len(prs) != 2:
                msgs.error('Invalid format for .coadd1d file.' + msgs.newline() +
                           'You must specify a spec1dfile and objid on the first line of the coadd1d block')
            if len(prs) > 1:
                objids_in.append(prs[1])
        is_config[s-1:e+1] = False

    # Check the sizes of the inputs
    nspec = len(spec1dfiles)
    if len(objids_in) == 1:
        objids = nspec*objids_in
    elif len(objids_in) == nspec:
        objids = objids_in
    else:
        msgs.error('Invalid format for .coadd1d file.' + msgs.newline() +
                   'You must specify a single objid on the first line of the coadd1d block,' + msgs.newline() +
                   'or specify an objid for every spec1dfile in the coadd1d block.' + msgs.newline() +
                   'Run pypeit_coadd_1dspec --help for information on the format')
    # Construct config to get spectrograph
    cfg_lines = list(lines[is_config])

    # Return
    return cfg_lines, spec1dfiles, objids
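The objid handling above broadcasts a single ID across every spec1d file; a standalone sketch of that rule, with invented file and object names:

spec1dfiles = ['spec1d_a.fits', 'spec1d_b.fits', 'spec1d_c.fits']  # hypothetical
objids_in = ['SPAT0100-SLIT0000-DET01']                            # one ID for all files

nspec = len(spec1dfiles)
if len(objids_in) == 1:
    objids = nspec * objids_in   # repeat the single ID for every file
elif len(objids_in) == nspec:
    objids = objids_in           # one ID per file, use as given
else:
    raise ValueError('objids must number one or match the spec1d files')

print(objids)  # the single ID repeated three times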
Example #5
def load_ordloc(fname):
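    """Load the left and right order-location traces associated with ``fname``."""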
    # Load the files
    mstrace_bname, mstrace_bext = os.path.splitext(fname)
    lname = mstrace_bname + "_ltrace" + mstrace_bext
    rname = mstrace_bname + "_rtrace" + mstrace_bext
    # Load the order locations
    ltrace = np.array(fits.getdata(lname, 0), dtype=float)
    msgs.info("Loaded left order locations for frame:" + msgs.newline() +
              fname)
    rtrace = np.array(fits.getdata(rname, 0), dtype=float)
    msgs.info("Loaded right order locations for frame:" + msgs.newline() +
              fname)
    return ltrace, rtrace
Example #6
    def main(args):
        """ Runs fluxing steps
        """
        # Load the file
        config_lines, spec1dfiles, sensfiles_in = read_fluxfile(args.flux_file)
        # Read in spectrograph from spec1dfile header
        header = fits.getheader(spec1dfiles[0])
        spectrograph = load_spectrograph(header['PYP_SPEC'])

        # Parameters
        spectrograph_def_par = spectrograph.default_pypeit_par()
        par = pypeitpar.PypeItPar.from_cfg_lines(
            cfg_lines=spectrograph_def_par.to_config(),
            merge_with=config_lines)
        # Write the par to disk
        print("Writing the parameters to {}".format(args.par_outfile))
        par.to_config(args.par_outfile)

        # Check the sizes of the inputs
        nspec = len(spec1dfiles)
        if len(sensfiles_in) == 1:
            # Use one sens file for each spec1d
            sensfiles = nspec * sensfiles_in
        elif len(sensfiles_in) == nspec:
            # One sensfile specified / spec1d
            sensfiles = sensfiles_in
        elif len(sensfiles_in) == 0 and par['fluxcalib']['use_archived_sens']:
            # No sensfile specified, but an archived sensfunc can be used.
            sf_archive = SensFileArchive.get_instance(spectrograph.name)
            sensfiles = nspec * [
                sf_archive.get_archived_sensfile(spec1dfiles[0])
            ]
        else:
            msgs.error(
                'Invalid format for .flux file.' + msgs.newline() +
                'You must specify a single sensfile on the first line of the flux block,'
                + msgs.newline() +
                'or specify a sensfile for every spec1dfile in the flux block,'
                + msgs.newline() +
                'or specify "use_archived_sens = True" to use an archived sensfile.'
                + msgs.newline() +
                'Run pypeit_flux_calib --help for information on the format')

        # Instantiate
        FxCalib = fluxcalibrate.FluxCalibrate.get_instance(
            spec1dfiles, sensfiles, par=par['fluxcalib'], debug=args.debug)
        msgs.info('Flux calibration complete')
        return 0
Example #7
    def save(self, outfile=None, overwrite=True):
        """
        Save the bias master data.

        Args:
            outfile (:obj:`str`, optional):
                Name for the output file.  Defaults to
                :attr:`master_file_path`.
            overwrite (:obj:`bool`, optional):
                Overwrite any existing file.
        """
        # Some checks
        if self.pypeitImage is None:
            msgs.warn('No MasterBias to save!')
            return
        if not self.pypeitImage.validate():
            msgs.warn('MasterBias is not a proper image.')
            return
        # Proceed
        _outfile = self.master_file_path if outfile is None else outfile
        # Check if it exists
        if os.path.exists(_outfile) and not overwrite:
            msgs.warn('Master file exists: {0}'.format(_outfile) + msgs.newline()
                      + 'Set overwrite=True to overwrite it.')
            return
        # Save
        hdr = self.build_master_header(steps=self.process_steps, raw_files=self.file_list)
        self.pypeitImage.write(_outfile, hdr=hdr, iext='BIAS')
        msgs.info('Master frame written to {0}'.format(_outfile))
Example #8
    def get_headarr(self, inp, strict=True):
        """
        Read the header data from all the extensions in the file.

        Args:
            inp (:obj:`str`, `astropy.io.fits.HDUList`_):
                Name of the file to read or the previously opened HDU list.
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to read
                any of the headers. Set to False to report a warning and
                continue.

        Returns:
            :obj:`list`: A list of `astropy.io.fits.Header`_ objects with the
            extension headers.
        """
        # Faster to open the whole file and then assign the headers,
        # particularly for gzipped files (e.g., DEIMOS)
        if isinstance(inp, str):
            try:
                hdu = io.fits_open(inp)
            except Exception:
                if strict:
                    msgs.error('Problem opening {0}.'.format(inp))
                else:
                    msgs.warn(
                        'Problem opening {0}.'.format(inp) + msgs.newline() +
                        'Proceeding, but should consider removing this file!')
                    return ['None'] * 999  # self.numhead
        else:
            hdu = inp
        return [hdu[k].header for k in range(len(hdu))]
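The open-once pattern can be exercised without a file on disk by building the HDU list in memory; a small sketch assuming only astropy and numpy:

import numpy as np
from astropy.io import fits

# Build a two-extension HDU list in memory instead of opening a file.
hdu = fits.HDUList([fits.PrimaryHDU(),
                    fits.ImageHDU(data=np.zeros((2, 2)), name='SCI')])
headarr = [hdu[k].header for k in range(len(hdu))]
print(len(headarr), headarr[1]['EXTNAME'])  # 2 SCI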
Example #9
    def apply_spectral_flexure(self, shift, sky_spec):
        """
        Apply interpolation with the flexure dict

        Args:
            shift (float):
                additive spectral flexure in pixels
            sky_spec (`linetools.spectra.xspectrum1d.XSpectrum1D`_):
                Sky Spectrum

        Returns:
            `linetools.spectra.xspectrum1d.XSpectrum1D`_: New sky
            spectrum (mainly for QA)
        """
        # Apply a simple interpolation-based shift to each extraction
        for attr in ['BOX', 'OPT']:
            if self[attr + '_WAVE'] is not None:
                msgs.info(
                    "Applying flexure correction to {0:s} extraction for object:"
                    .format(attr) + msgs.newline() +
                    "{0:s}".format(str(self.NAME)))
                self[attr + '_WAVE'] = flexure.flexure_interp(
                    shift, self[attr + '_WAVE']).copy()
        # Shift sky spec too
        twave = flexure.flexure_interp(shift,
                                       sky_spec.wavelength.value) * units.AA
        new_sky = xspectrum1d.XSpectrum1D.from_tuple((twave, sky_spec.flux))
        # Save - since flexure may have been applied/calculated twice, this needs to be additive
        self.update_flex_shift(shift, flex_type='local')
        # Return
        return new_sky
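At its core the flexure correction is a sub-pixel shift of the wavelength solution; a self-contained analogue built on numpy.interp (a sketch of the idea, not PypeIt's flexure.flexure_interp, with an invented linear dispersion):

import numpy as np

def shift_wave(shift, wave):
    # Evaluate the wavelength solution at pixel positions offset by `shift`.
    pix = np.arange(wave.size)
    return np.interp(pix - shift, pix, wave)

wave = 4000.0 + 0.5 * np.arange(100)  # hypothetical linear dispersion
print(shift_wave(2.3, wave)[:3])      # wavelengths shifted by 2.3 pixels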
Example #10
    def load_master(self, filename, force=False):

        # Does the master file exist?
        if not os.path.isfile(filename):
            # msgs.warn("No Master frame found of type {:s}: {:s}".format(self.frametype, filename))
            msgs.warn("No Master frame found of {:s}".format(filename))
            if force:
                msgs.error("Crashing out because reduce-masters-force=True:" + msgs.newline() + filename)
            return None
        else:
            # msgs.info("Loading a pre-existing master calibration frame of type: {:}".format(self.frametype) + " from filename: {:}".format(filename))
            msgs.info("Loading a pre-existing master calibration frame of SENSFUNC from filename: {:}".format(filename))

            hdu = fits.open(filename)
            norder = hdu[0].header['NORDER']
            sens_dicts = {}
            for iord in range(norder):
                head = hdu[iord + 1].header
                tbl = hdu['SENSFUNC-ORDER{0:04}'.format(iord)].data
                sens_dict = {}
                sens_dict['wave'] = tbl['WAVE']
                sens_dict['sensfunc'] = tbl['SENSFUNC']
                for key in ['wave_min', 'wave_max', 'exptime', 'airmass', 'std_file', 'std_ra', 'std_dec',
                            'std_name', 'cal_file', 'ech_orderindx']:
                    try:
                        sens_dict[key] = head[key.upper()]
                    except KeyError:
                        pass
                sens_dicts[str(iord)] = sens_dict
            sens_dicts['norder'] = norder
            return sens_dicts
Example #11
    def get_headarr(self, filename, strict=True):
        """
        Read the header data from all the extensions in the file.

        Args:
            filename (:obj:`str`):
                Name of the file to read.
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read any of the headers.  Set to False to report a
                warning and continue.

        Returns:
            list: Returns a list of :attr:`numhead` :obj:`fits.Header`
            objects with the extension headers.
        """
        # Faster to open the whole file and then assign the headers,
        # particularly for gzipped files (e.g., DEIMOS)
        try:
            hdu = fits.open(filename)
        except Exception:
            if strict:
                msgs.error('Problem opening {0}.'.format(filename))
            else:
                msgs.warn('Problem opening {0}.'.format(filename) + msgs.newline()
                          + 'Proceeding, but should consider removing this file!')
                return ['None']*self.numhead
        return [ hdu[k].header for k in range(self.numhead) ]
Example #12
def save_wavelength_calibration(outfile, wv_calib, overwrite=True):
    """
    Save a wavelength solution to a file.

    Args:
        outfile (:obj:`str`):
            Name for the output file.
        wv_calib (:obj:`dict`):
            Dictionary with the wavelength solution.  TODO: Document
            the format of this dictionary!
        overwrite (:obj:`bool`, optional):
            Overwrite any existing file.
    """
    # Check if it exists
    if os.path.exists(outfile) and not overwrite:
        # TODO: Should this throw an error instead?
        msgs.warn('File exists: {0}'.format(outfile) + msgs.newline()
                  + 'Set overwrite=True to overwrite it.')
        return

    # jsonify has the annoying property that it modifies the objects it
    # processes (e.g., it converts lists to arrays), so work on a deep copy
    data_for_json = copy.deepcopy(wv_calib)
    gddict = linetools.utils.jsonify(data_for_json)
    linetools.utils.savejson(outfile, gddict, easy_to_read=True, overwrite=True)
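The deep-copy-before-serialization pattern matters whenever the serializer mutates its input; a library-free sketch with plain json and an illustrative to_jsonable helper (not the linetools API):

import copy
import json
import numpy as np

def to_jsonable(d):
    # Destructive in the same spirit as jsonify: rewrites values in place.
    for key, val in d.items():
        if isinstance(val, np.ndarray):
            d[key] = val.tolist()
    return d

wv_calib = {'0': np.arange(3, dtype=float)}
safe = to_jsonable(copy.deepcopy(wv_calib))   # the original dict is untouched
print(json.dumps(safe), type(wv_calib['0']))  # ... <class 'numpy.ndarray'>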
Example #13
    def compound_meta(self, headarr: List[fits.Header], meta_key: str):
        """
        Methods to generate meta in a more complex manner than simply
        reading from the header.

        ``mjd`` is converted from the UTSHUT header card;
        ``dispangle`` is parsed from the ANGLE card.

        Args:
            headarr (:obj:`list`):
                List of `astropy.io.fits.Header`_ objects.
            meta_key (:obj:`str`):
                Metadata keyword to construct.

        Returns:
            object: Metadata value read from the header(s).
        """
        if meta_key == 'mjd':
            return Time(headarr[0]['UTSHUT']).mjd
        elif meta_key == 'dispangle':
            try:
                return Angle(headarr[0]['ANGLE']).deg
            except Exception as e:
                msgs.warn("Could not read dispangle from header:" + msgs.newline() + str(headarr[0]['ANGLE']))
                raise e
        else:
            return None
Example #14
    def save(self, outfile=None, overwrite=True):
        """
        Save the alignment traces to a master frame.

        Args:
            outfile (:obj:`str`, optional):
                Name of the output file.  Defaults to :attr:`master_file_path`.
            overwrite (:obj:`bool`, optional):
                Overwrite any existing file.
        """
        _outfile = self.master_file_path if outfile is None else outfile
        # Check if it exists
        if os.path.exists(_outfile) and not overwrite:
            msgs.warn('Master file exists: {0}'.format(_outfile) +
                      msgs.newline() + 'Set overwrite=True to overwrite it.')
            return

        # Report and save
        prihdr = self.build_master_header(steps=self.steps)
        #   - Add the binning
        prihdr['BINNING'] = (self.binning, 'PypeIt: Binning')
        #   - Add the detector number
        prihdr['DET'] = (self.det, 'PypeIt: Detector')
        #   - Add the tracing parameters
        self.par.to_header(prihdr)

        # Set the data and extension names
        data = [self.align_dict['alignments']]
        extnames = ['ALIGNMENTS']
        # Write the output to a fits file
        save.write_fits(prihdr, data, _outfile, extnames=extnames)
        msgs.info('Master frame written to {0}'.format(_outfile))
Example #15
    def compound_meta(self, headarr: List[fits.Header], meta_key: str):
        """
        Methods to generate metadata requiring interpretation of the header
        data, instead of simply reading the value of a header card.

        Args:
            headarr (:obj:`list`):
                List of `astropy.io.fits.Header`_ objects.
            meta_key (:obj:`str`):
                Metadata keyword to construct.

        Returns:
            object: Metadata value read from the header(s).
        """
        if meta_key == 'mjd':
            return Time(headarr[0]['UTSHUT']).mjd
        elif meta_key == 'dispangle':
            try:
                return Angle(headarr[0]['ANGLE'].lower()).deg
            except Exception as e:
                msgs.warn("Could not read dispangle from header:" +
                          msgs.newline() + str(headarr[0]['ANGLE']))
                raise e
        else:
            return None
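Both branches lean on astropy's parsers; a standalone check of the same two conversions, with invented header values:

from astropy.time import Time
from astropy.coordinates import Angle

# UTSHUT-style ISO timestamp -> MJD, and an ANGLE-style string -> degrees.
print(Time('2021-03-01T08:15:00').mjd)  # ~59274.34
print(Angle('35d12m30s').deg)           # ~35.2083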
Example #16
def func_vander(x, func, deg, minx=None, maxx=None):
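    """Return the pseudo-Vandermonde matrix of ``x`` for the requested basis function."""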
    if func == "polynomial":
        return np.polynomial.polynomial.polyvander(x, deg)
    elif func == "legendre":
        if minx is None or maxx is None:
            if np.size(x) == 1:
                xmin, xmax = -1.0, 1.0
            else:
                xmin, xmax = np.min(x), np.max(x)
        else:
            xmin, xmax = minx, maxx
        xv = 2.0 * (x-xmin)/(xmax-xmin) - 1.0
        return np.polynomial.legendre.legvander(xv, deg)
    elif func == "chebyshev":
        if minx is None or maxx is None:
            if np.size(x) == 1:
                xmin, xmax = -1.0, 1.0
            else:
                xmin, xmax = np.min(x), np.max(x)
        else:
            xmin, xmax = minx, maxx
        xv = 2.0 * (x-xmin)/(xmax-xmin) - 1.0
        return np.polynomial.chebyshev.chebvander(xv, deg)
    else:
        msgs.error("Fitting function '{0:s}' is not implemented yet" + msgs.newline() +
                   "Please choose from 'polynomial', 'legendre', 'chebyshev'")
Example #17
    def load_master(self, filename):
        """
        Load a full (all slit) wv_calib dict

        Includes converting the JSON lists of particular items into ndarray

        Fills self.wv_calib and self.par

        Args:
            filename (str): Master file

        Returns:
            tuple: self.wv_calib (:obj:`dict` or None) and None

        """

        # Does the master file exist?
        if not os.path.isfile(filename):
            msgs.warn("No Master frame found of type {:s}: {:s}".format(self.frametype, filename))
            return None, None
        else:
            msgs.info("Loading Master {0:s} frame:".format(self.frametype) + msgs.newline() + filename)
            self.wv_calib = linetools.utils.loadjson(filename)
            # Recast a few items as arrays
            for key in self.wv_calib.keys():
                if key in ['steps', 'par']:  # This isn't really necessary
                    continue
                for tkey in self.wv_calib[key].keys():
                    if isinstance(self.wv_calib[key][tkey], list):
                        self.wv_calib[key][tkey] = np.array(self.wv_calib[key][tkey])
            # parset
            if 'par' in self.wv_calib.keys():
                self.par = self.wv_calib['par'].copy()
            return self.wv_calib, None
Example #18
def _read_pypeit_file_lines(ifile):
    """
    General parser for a pypeit file.

    - Checks that the file exists.
    - Reads all the lines in the file
    - Removes comments and empty lines, and replaces special characters.
    
    Applies to settings, setup, and user-level reduction files.

    Args:
        ifile (str): Name of the file to parse.

    Returns:
        :obj:`numpy.ndarray`: The valid lines in the file.
    """
    # Check the files
    if not os.path.isfile(ifile):
        msgs.error('The filename does not exist -' + msgs.newline() + ifile)

    # Read the input lines and replace special characters
    with open(ifile, 'r') as f:
        lines = np.array([l.replace('\t', ' ').replace('\n', ' ').strip() \
                                for l in f.readlines()])
    # Remove empty or fully commented lines
    lines = lines[np.array([len(l) > 0 and l[0] != '#' for l in lines])]
    # Remove appended comments and return
    return np.array([l.split('#')[0] for l in lines])
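A standalone round trip of the same parsing rules, using a temporary file with invented content:

import os
import tempfile
import numpy as np

text = "# a comment\nspec1d_a.fits OBJ1  # trailing note\n\n  spec1d_b.fits OBJ2\n"
with tempfile.NamedTemporaryFile('w', suffix='.pypeit', delete=False) as f:
    f.write(text)

with open(f.name, 'r') as fh:
    lines = np.array([l.replace('\t', ' ').replace('\n', ' ').strip()
                      for l in fh.readlines()])
lines = lines[np.array([len(l) > 0 and l[0] != '#' for l in lines])]
print(list(np.array([l.split('#')[0] for l in lines])))  # two valid, de-commented lines
os.unlink(f.name)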
Example #19
    def save(self, outfile=None, overwrite=True):
        """
        Save the wavelength calibration data to a master frame.

        This is largely a wrapper for
        :func:`pypeit.core.wavecal.waveio.save_wavelength_calibration`.

        Args:
            outfile (:obj:`str`, optional):
                Name for the output file.  Defaults to
                :attr:`master_file_path`.
            overwrite (:obj:`bool`, optional):
                Overwrite any existing file.
        """
        _outfile = self.master_file_path if outfile is None else outfile
        # Check if it exists
        if os.path.exists(_outfile) and not overwrite:
            msgs.warn('Master file exists: {0}'.format(_outfile) +
                      msgs.newline() + 'Set overwrite=True to overwrite it.')
            return

        # Report and save

        # jsonify has the annoying property that it modifies the objects it
        # processes (e.g., it converts lists to arrays), so work on a deep copy
        data_for_json = copy.deepcopy(self.wv_calib)
        gddict = linetools.utils.jsonify(data_for_json)
        linetools.utils.savejson(_outfile,
                                 gddict,
                                 easy_to_read=True,
                                 overwrite=True)
        msgs.info('Master frame written to {0}'.format(_outfile))
Example #20
    def save(self, outfile=None, overwrite=True):
        """
        Save the wavelength calibration data to a master frame.

        This is largely a wrapper for
        :func:`pypeit.core.wavecal.waveio.save_wavelength_calibration`.

        Args:
            outfile (:obj:`str`, optional):
                Name for the output file.  Defaults to
                :attr:`file_path`.
            overwrite (:obj:`bool`, optional):
                Overwrite any existing file.
        """
        _outfile = self.file_path if outfile is None else outfile
        # Check if it exists
        if os.path.exists(_outfile) and not overwrite:
            msgs.warn('Master file exists: {0}'.format(_outfile) +
                      msgs.newline() + 'Set overwrite=True to overwrite it.')
            return

        # Report and save
        waveio.save_wavelength_calibration(_outfile,
                                           self.wv_calib,
                                           overwrite=overwrite)
        msgs.info('Master frame written to {0}'.format(_outfile))
Example #21
    def load_master(self, filename, exten=0, force=False):
        """
        Load the master frame

        Args:
            filename (str): Master file
            exten (int, optional): Not used
            force (bool, optional): If True, crash out when the master
                frame is not found

        Returns:
            tuple: The tilts dict and the primary header, or (None, None)
            if the file is not found

        """

        # Does the master file exist?
        if not os.path.isfile(filename):
            msgs.warn("No Master frame found of type {:s}: {:s}".format(self.frametype, filename))
            if force:
                msgs.error("Crashing out because reduce-masters-force=True:" + msgs.newline() + filename)
            return None, None
        else:
            msgs.info("Loading a pre-existing master calibration frame of type: {:}".format(self.frametype) + " from filename: {:}".format(filename))
            hdu = fits.open(filename)
            head0 = hdu[0].header
            tilts = hdu[0].data
            coeffs = hdu[1].data
            slitcen = hdu[2].data
            spat_order = hdu[3].data
            spec_order = hdu[4].data
            tilts_dict = {'tilts':tilts, 'coeffs':coeffs, 'slitcen':slitcen,
                          'func2d':hdu[0].header['FUNC2D'], 'nslit':hdu[0].header['NSLIT'],
                          'spat_order':spat_order, 'spec_order':spec_order}
            return tilts_dict, head0
Example #22
def key_none_allowed_filename(v, allowed):
    """ Check if a keyword argument is set to None. If not,
    check that its value is in the 'allowed' list. Finally,
    assume that the supplied value is the name of a filename.

    Parameters
    ----------
    v : str
      value of a keyword argument
    allowed : list
      list of allowed values that v can take

    Returns
    -------
    v : None, str
      A value used by the settings dictionary
    """
    if v.lower() == "none":
        v = None
    elif v.lower() in allowed:
        for i in allowed:
            if v.lower() == i:
                v = i
                break
    else:
        msgs.info("Assuming the following is the name of a file:" +
                  msgs.newline() + v)
    return v
Example #23
    def get_headarr(self, filename, strict=True):
        """
        Read the header data from all the extensions in the file.

        Args:
            filename (:obj:`str`):
                Name of the file to read.
            strict (:obj:`bool`, optional):
                Function will fault if :func:`fits.getheader` fails to
                read any of the headers.  Set to False to report a
                warning and continue.

        Returns:
            list: Returns a list of :attr:`numhead` :obj:`fits.Header`
            objects with the extension headers.
        """
        headarr = ['None'] * self.numhead
        for k in range(self.numhead):
            try:
                headarr[k] = fits.getheader(filename, ext=k)
            except Exception:
                if strict:
                    msgs.error("Header error in extension {0} in {1}.".format(
                        k, filename))
                else:
                    msgs.warn('Bad header in extension {0} in {1}'.format(
                        k, filename) + msgs.newline() +
                              'Proceeding in the hope this was a ' +
                              'calibration file, otherwise consider removing.')
        return headarr
Example #24
def key_none_allowed(v, allowed):
    """ Check if a keyword argument is set to None. If not,
    check that its value is in the 'allowed' list.

    Parameters
    ----------
    v : str
      value of a keyword argument
    allowed : list
      list of allowed values that v can take

    Returns
    -------
    v : None, str
      A value used by the settings dictionary
    """
    ll = inspect.currentframe().f_back.f_code.co_name.split('_')
    func_name = "'" + " ".join(ll) + "'"
    if v.lower() == "none":
        v = None
    elif v.lower() in allowed:
        for i in allowed:
            if v.lower() == i:
                v = i
                break
    else:
        msgs.error("The argument of {0:s} must be one of".format(func_name) +
                   msgs.newline() + ", ".join(allowed))
    return v
Example #25
def key_allowed(v, allowed, upper=False):
    """ Check that a keyword argument is in an allowed list of parameters.

    Parameters
    ----------
    v : str
      value of a keyword argument
    allowed : list
      list of allowed values that v can take
    upper : bool (optional)
      If True, the allowed list is expected to contain only
      uppercase strings. If False, the allowed list is expected
      to contain only lowercase strings.

    Returns
    -------
    v : str
      A string used by the settings dictionary
    """
    ll = inspect.currentframe().f_back.f_code.co_name.split('_')
    func_name = "'" + " ".join(ll) + "'"
    if upper:
        v = v.upper()
    else:
        v = v.lower()
    if v not in allowed:
        msgs.error("The argument of {0:s} must be one of".format(func_name) +
                   msgs.newline() + ", ".join(allowed))
    if v.lower() == "none":
        v = None
    return v
Example #26
def load_tree(polygon=4, numsearch=20):
    """
    Load a KDTree of ThAr patterns that is stored on disk

    Parameters
    ----------
    polygon : int
        Number of sides to the polygon used in pattern matching:

            - polygon=3  -->  trigon (two anchor lines and one floating line)
            - polygon=4  -->  tetragon (two anchor lines and two floating lines)
            - polygon=5  -->  pentagon (two anchor lines and three floating lines)

    numsearch : int
        Number of consecutive detected lines used to generate a pattern.
        For example, if numsearch is 4, then for a trigon, the following
        patterns will be generated (assuming line #1 is the left
        anchor):

            - 1 2 3  (in this case line #3 is the right anchor)
            - 1 2 4  (in this case line #4 is the right anchor)
            - 1 3 4  (in this case line #4 is the right anchor)

    Returns
    -------
    file_load : KDTree instance
        The KDTree containing the patterns
    index : ndarray
        For each pattern in the KDTree, this array stores the
        corresponding index in the linelist
    """

    # TODO: Can we save these as fits files instead?
    # TODO: Please don't use imports within functions
    import pickle
    filename = os.path.join(
        data.Paths.linelist,
        f'ThAr_patterns_poly{polygon}_search{numsearch}.kdtree')
    fileindx = os.path.join(
        data.Paths.linelist,
        f'ThAr_patterns_poly{polygon}_search{numsearch}.index.npy')
    try:
        file_load = pickle.load(open(filename, 'rb'))
        index = np.load(fileindx)
    except FileNotFoundError:
        msgs.info(
            'The requested KDTree was not found on disk' + msgs.newline() +
            'please be patient while the ThAr KDTree is built and saved to disk.'
        )
        from pypeit.core.wavecal import kdtree_generator
        file_load, index = kdtree_generator.main(polygon,
                                                 numsearch=numsearch,
                                                 verbose=True,
                                                 ret_treeindx=True,
                                                 outname=filename)

    return file_load, index
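The cache-on-disk pattern can be reproduced with any KDTree; a minimal sketch assuming scipy, with an invented cache file name:

import pickle
import numpy as np
from scipy.spatial import cKDTree

patterns = np.random.rand(100, 3)          # stand-in for ThAr patterns
tree = cKDTree(patterns)
with open('patterns.kdtree', 'wb') as f:   # hypothetical cache file
    pickle.dump(tree, f)

with open('patterns.kdtree', 'rb') as f:   # later: load instead of rebuilding
    tree = pickle.load(f)
dist, indx = tree.query(patterns[0], k=1)
print(indx)  # 0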
Example #27
def insufficient_frame_error(frametype):
    msgs.error('Insufficient {0} frames found. Include more frames, '.format(
        frametype) + 'or reduce the required amount by setting' + msgs.newline() +
               '[calibrations]' + msgs.newline() +
               '    [[{0}frame]]'.format(frametype) + msgs.newline() +
               '        number = XX' + msgs.newline() +
               'in the pypeit file, or specify a specific ' +
               'pixelflat file by setting' + msgs.newline() +
               '[calibrations]' + msgs.newline() +
               '    [[{0}frame]]'.format(frametype) + msgs.newline() +
               '        useframe = XX' + msgs.newline() + 'in the pypeit file')
Example #28
def func_der(coeffs, func, nderive=1):
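    """Return the coefficients of the ``nderive``-th derivative of a series in the requested basis."""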
    if func == "polynomial":
        return np.polynomial.polynomial.polyder(coeffs, m=nderive)
    elif func == "legendre":
        return np.polynomial.legendre.legder(coeffs, m=nderive)
    elif func == "chebyshev":
        return np.polynomial.chebyshev.chebder(coeffs, m=nderive)
    else:
        msgs.error("Functional derivative '{0:s}' is not implemented yet"+msgs.newline() +
                   "Please choose from 'polynomial', 'legendre', 'chebyshev'")
Example #29
def key_keyword(v, force_format=True):
    """ Check that a keyword argument satisfies the form required
    for specifying a header keyword.

    Parameters
    ----------
    v : str
      value of a keyword argument
    force_format : bool
      If True, v can only have the format of a header keyword.
      If False, v can take the form of a header keyword, or a
      user-specified value. In the latter case, the boolean
      variable 'valid' is returned so the parent function
      knows if the supplied value of v should be dealt with as
      a manual entry or as a header keyword.

    Returns
    -------
    v : str
      A value used by the settings dictionary
    valid : bool
      Is the input a valid header keyword format
    """
    ll = inspect.currentframe().f_back.f_code.co_name.split('_')
    func_name = "'" + " ".join(ll) + "'"
    if v.lower() == "none":
        v = None
    else:
        valid = is_keyword(v)
        if not valid and force_format:
            msgs.error(
                "The argument of {0:s} must be of the form:".format(
                    func_name) + msgs.newline() + "##.NAME" + msgs.newline() +
                "where ## is the fits extension (see command: fits headext##),"
                + msgs.newline() + "and NAME is the header keyword name")
        elif valid and force_format:
            return v
        else:
            return v, valid
    return v
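A self-contained stand-in for the is_keyword check, matching the '##.NAME' form described in the error message (the regex is an assumption, not the PypeIt implementation):

import re

def is_keyword(v):
    # '##.NAME': a fits extension number, a dot, then a header card name.
    return re.fullmatch(r'\d+\.[A-Za-z0-9_-]+', v) is not None

print(is_keyword('01.EXPTIME'))  # True
print(is_keyword('exptime'))     # False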
Example #30
def get_pc(data, k, tol=0.0, maxiter=20, nofix=False, noortho=False):
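    """Iteratively estimate the first ``k`` principal components of ``data`` (one row per variable)."""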

    p = data.shape[0]
    if p == 0:
        msgs.error("You need to supply more components in the PCA")
    #n = np.size(data)/p
    if k > p:
        debugger.set_trace()
        msgs.error(
            "The number of principal components must be less than or equal" +
            msgs.newline() + "to the order of the fitting function")

    # Set the initial conditions
    eigv = np.zeros((p, k))
    eigv[:k, :k] = np.identity(k)

    niter = 0
    diff = tol * 2.0 + 1.0

    while (niter < maxiter) and (diff > tol):
        hidden = np.dot(np.dot(np.linalg.inv(np.dot(eigv.T, eigv)), eigv.T),
                        data)
        oldeigv = eigv.copy()
        eigv = np.dot(
            data, np.dot(hidden.T, np.linalg.inv(np.dot(hidden, hidden.T))))
        if tol > 0.0:
            diff = 0.0
            for i in range(k):
                diff += np.abs(
                    1.0 - np.sum(oldeigv[:, i] * eigv[:, i]) /
                    np.sqrt(np.sum(oldeigv[:, i]**2) * np.sum(eigv[:, i]**2)))
        niter += 1

    # Orthonormalize?
    if not noortho:
        for b in range(k):
            # Orthogonalize
            for bp in range(b):
                dot = np.sum(eigv[:, b] * eigv[:, bp])
                eigv[:, b] -= dot * eigv[:, bp]
            # Normalize
            dot = np.sum(eigv[:, b]**2)
            dot = 1.0 / np.sqrt(dot)
            eigv[:, b] *= dot
        # Project variables onto new coordinates?
        if not nofix:
            hidden = np.dot(eigv.T, data)
            eval_hidden, evec_hidden = do_pca(hidden.T, cov=True)
            eigv = np.dot(eigv, evec_hidden.T)
        hidden = np.dot(eigv.T, data)
    return eigv, hidden
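For orthonormalized components the iteration above agrees with an SVD up to a rotation within the recovered subspace; a small numpy-only cross-check of that claim (a sketch, independent of the msgs/do_pca dependencies above):

import numpy as np

rng = np.random.default_rng(0)
# Data with a strong two-component structure plus a little noise.
data = (np.outer(rng.normal(size=10), rng.normal(size=200))
        + 0.5 * np.outer(rng.normal(size=10), rng.normal(size=200))
        + 0.01 * rng.normal(size=(10, 200)))

# EM-style update, as in the loop above: solve for hidden, then for eigv.
eigv = np.eye(10)[:, :2]
for _ in range(50):
    hidden = np.linalg.solve(eigv.T @ eigv, eigv.T @ data)
    eigv = data @ hidden.T @ np.linalg.inv(hidden @ hidden.T)

# Compare the orthonormalized span with the top-2 left singular vectors:
# the principal angles' cosines (singular values of q.T @ u) should be ~1.
q = np.linalg.qr(eigv)[0]
u = np.linalg.svd(data, full_matrices=False)[0][:, :2]
print(np.allclose(np.linalg.svd(q.T @ u, compute_uv=False), 1.0, atol=1e-6))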