Example 1
    def parse_text_header(self, Table):
        """
        Grab relevant parameters from a table header (xaxis type, etc)

        This function should only exist for Spectrum objects created from
        .txt or other atpy table type objects
        """
        self.Table = Table
        xtype = Table.data.dtype.names[Table.xaxcol]
        if xtype in units.xtype_dict.values():
            self.xarr.xtype = xtype
            self.xarr.units = Table.columns[xtype].unit
        elif xtype in units.xtype_dict:
            self.xarr.xtype = units.xtype_dict[xtype]
            self.xarr.units = Table.columns[xtype].unit
        else:
            warn(
                "Warning: Invalid xtype in text header - this may mean no text header was available.  X-axis units will be pixels unless you set them manually (e.g., sp.xarr.unit='angstroms')"
            )
            self.xarr.xtype = 'pixels'
            self.xarr.units = 'none'
            #raise ValueError("Invalid xtype in text header")
        self.ytype = Table.data.dtype.names[Table.datacol]
        try:
            self.units = Table.columns[self.ytype].unit
        except ValueError:
            self.units = None
            pass  # astropy 0.2.dev11 introduces an incompatibility here
        self.header = pyfits.Header()
        self._update_header()
Example 2
    def parse_text_header(self, Table):
        """
        Grab relevant parameters from a table header (xaxis type, etc)

        This function should only exist for Spectrum objects created from
        .txt or other atpy table type objects
        """
        self.Table = Table
        xtype = Table.data.dtype.names[Table.xaxcol]
        if xtype in units.xtype_dict.values():
            self.xarr.xtype = xtype
            self.xarr.units = Table.columns[xtype].unit
        elif xtype in units.xtype_dict:
            self.xarr.xtype = units.xtype_dict[xtype]
            self.xarr.units = Table.columns[xtype].unit
        else:
            warn(
                "Warning: Invalid xtype in text header - this may mean no text header was available.  X-axis units will be pixels unless you set them manually (e.g., sp.xarr.unit='angstroms')"
            )
            self.xarr.xtype = "pixels"
            self.xarr.units = "none"
            # raise ValueError("Invalid xtype in text header")
        self.ytype = Table.data.dtype.names[Table.datacol]
        try:
            self.units = Table.columns[self.ytype].unit
        except ValueError:
            self.units = None
            pass  # astropy 0.2.dev11 introduces an incompatibility here
        self.header = pyfits.Header()
        self._update_header()
Example 3
    def __init__(self, speclist, xunits="GHz", **kwargs):
        print "Creating spectra"
        speclist = list(speclist)
        for ii, spec in enumerate(speclist):
            if type(spec) is str:
                spec = Spectrum(spec)
                speclist[ii] = spec

        self.speclist = speclist

        print "Concatenating data"
        self.xarr = units.SpectroscopicAxes([sp.xarr.as_unit(xunits) for sp in speclist])
        self.xarr.units = xunits
        self.xarr.xtype = units.unit_type_dict[xunits]
        self.data = np.ma.concatenate([sp.data for sp in speclist])
        self.error = np.ma.concatenate([sp.error for sp in speclist])
        self._sort()

        self.header = pyfits.Header()
        for spec in speclist:
            for key, value in spec.header.items():
                try:
                    self.header.update(key, value)
                except (ValueError, KeyError):
                    warn("Could not update header KEY=%s to VALUE=%s" % (key, value))
Example 4
def make_multispec_axis(hdr, axsplit, WAT1_dict):
    num,beam,dtype,crval,cdelt,naxis,z,aplow,aphigh = axsplit[:9]
    # this is a hack for cropped spectra...
    #print "header naxis: %i, WAT naxis: %i" % (hdr['NAXIS1'], int(naxis))
    if hdr['NAXIS1'] != int(naxis):
        crpix = int(naxis) - hdr['NAXIS']
        naxis = hdr['NAXIS1']
        warn("Treating as cropped echelle spectrum.")
    else:
        crpix = 0
    
    if len(axsplit) > 9:
        functions = axsplit[9:]
        warn("Found but did not use functions %s" % str(functions))
        
    if int(dtype) == 0:
        # Linear dispersion (eq 11, p.9 from Valdez, linked above)
        xax = (float(crval) + float(cdelt) * (np.arange(int(naxis)))) / (1.+float(z))
    elif int(dtype) == 1:
        # Log-linear dispersion (eq 12, p.9 from Valdez, linked above)
        xax = 10.**(float(crval) + float(cdelt) * (np.arange(int(naxis)))) / (1.+float(z))
    # elif int(dtype) == 2:
        # Non-linear dispersion
    # elif int(dtype) == -1:
        # Data is not dispersion coords
    else: raise ValueError("Unrecognized MULTISPE dispersion in IRAF Echelle specification")
    
    headerkws = {'CRPIX1':1, 'CRVAL1':crval, 'CDELT1':cdelt, 'NAXIS1':naxis, 'NAXIS':1, 'REDSHIFT':z, 'CTYPE1':'wavelength', 'CUNIT1':WAT1_dict['units']}
    
    return xax, naxis, headerkws
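
For reference, the two dispersion relations used above can be evaluated in isolation; a minimal numpy sketch with made-up CRVAL/CDELT/NAXIS/redshift values (not taken from any real header):

import numpy as np

# Invented WAT2 dispersion parameters, for illustration only.
crval, cdelt, naxis, z = 6496.5, 0.0244, 8, 0.0

# dtype == 0: linear dispersion (eq. 11 of the IRAF specwcs document)
xax_linear = (crval + cdelt * np.arange(naxis)) / (1. + z)

# dtype == 1: log-linear dispersion (eq. 12); crval and cdelt are log10 values there
xax_loglinear = 10.**(3.81 + 1.0e-5 * np.arange(naxis)) / (1. + z)

print(xax_linear[:3])    # [6496.5    6496.5244 6496.5488]
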
Example 5
def make_linear_axis(hdr, axsplit, WAT1_dict):
    num, beam, dtype, crval, cdelt, naxis, z, aplow, aphigh = axsplit[:9]
    # this is a hack for cropped spectra...
    #print "header naxis: %i, WAT naxis: %i" % (hdr['NAXIS1'], int(naxis))
    if hdr['NAXIS1'] != int(naxis):
        naxis = hdr['NAXIS1']
        crpix = hdr.get('CRPIX1')
        warn("Treating as cropped echelle spectrum.")
    else:
        crpix = 0

    if len(axsplit) > 9:
        functions = axsplit[9:]
        warn("Found but did not use functions %s" % str(functions))

    if int(dtype) == 0:
        # Linear dispersion (eq 2, p.5 from Valdez, linked above)
        xax = ((float(crval) + float(cdelt) *
                (np.arange(int(naxis)) + 1 - crpix)) / (1. + float(z)))
    else:
        raise ValueError(
            "Unrecognized LINEAR dispersion in IRAF Echelle specification")

    headerkws = {
        'CRPIX1': 1,
        'CRVAL1': crval,
        'CDELT1': cdelt,
        'NAXIS1': naxis,
        'NAXIS': 1,
        'REDSHIFT': z,
        'CTYPE1': 'wavelength',
        'CUNIT1': WAT1_dict['units']
    }

    return xax, naxis, headerkws
Example 6
    def line_ids_from_measurements(self,
                                   auto_yloc=True,
                                   auto_yloc_fraction=0.9,
                                   **kwargs):
        """
        Add line ID labels to a plot using lineid_plot
        http://oneau.wordpress.com/2011/10/01/line-id-plot/
        https://github.com/phn/lineid_plot
        http://packages.python.org/lineid_plot/

        Parameters
        ----------
        auto_yloc : bool
            If set, overrides box_loc and arrow_tip (the vertical position of
            the lineid labels) in kwargs to be `auto_yloc_fraction` of the plot
            range
        auto_yloc_fraction: float in range [0,1]
            The fraction of the plot (vertically) at which to place labels

        Examples
        --------
        >>> import numpy as np
        >>> import pyspeckit
        >>> sp = pyspeckit.Spectrum(
                xarr=pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,101),
                    units='km/s', refX=6562.8, refX_unit='angstroms'),
                data=np.random.randn(101), error=np.ones(101))
        >>> sp.plotter()
        >>> sp.specfit(multifit=None, fittype='gaussian', guesses=[1,0,1]) # fitting noise....
        >>> sp.measure()
        >>> sp.plotter.line_ids_from_measurements()
        """
        import lineid_plot

        if hasattr(self.Spectrum, 'measurements'):
            measurements = self.Spectrum.measurements

            if auto_yloc:
                yr = self.axis.get_ylim()
                kwargs['box_loc'] = (yr[1] -
                                     yr[0]) * auto_yloc_fraction + yr[0]
                kwargs['arrow_tip'] = (yr[1] - yr[0]) * (auto_yloc_fraction *
                                                         0.9) + yr[0]

            lineid_plot.plot_line_ids(
                self.Spectrum.xarr,
                self.Spectrum.data,
                [v['pos'] for v in measurements.lines.values()],
                measurements.lines.keys(),
                ax=self.axis,
                **kwargs)
        else:
            warn(
                "Cannot add line IDs from measurements unless measurements have been made!"
            )
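
The lineid_plot call that this method wraps can also be exercised directly; a minimal sketch with invented line positions and labels, assuming lineid_plot and matplotlib are installed:

import numpy as np
import matplotlib.pyplot as plt
import lineid_plot

wave = np.linspace(6540, 6590, 200)
flux = 1.0 + 0.05 * np.random.randn(200)
line_wave = [6548.0, 6562.8, 6583.4]        # invented positions
line_label = ['[NII]', 'H-alpha', '[NII]']

fig, ax = plt.subplots()
ax.plot(wave, flux)
lineid_plot.plot_line_ids(wave, flux, line_wave, line_label, ax=ax)
plt.show()
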
Example 7
def write_history(header, string):
    """
    Add a line to the header's history
    """
    if not isinstance(header, pyfits.Header):
        warn("header is not a header instance!")
    hdrstring = time.strftime("%m/%d/%y %H:%M:%S",time.localtime()) + " " + string
    try:
        header.add_history(hdrstring)
    except AttributeError:
        print "WARNING: Error in history writing.  Could not add this string: %s" % hdrstring
Example 8
def read_echelle(pyfits_hdu):
    """
    Read an IRAF Echelle spectrum
    
    http://iraf.noao.edu/iraf/ftp/iraf/docs/specwcs.ps.Z
    """

    hdr = pyfits_hdu.header

    WAT1_dict, specaxdict = _get_WATS(hdr)

    x_axes = []

    for specnum, axstring in specaxdict.iteritems():
        axsplit = axstring.replace('"', '').split()
        if specnum != int(axsplit[0]):
            raise ValueError("Mismatch in IRAF Echelle specification")
        num, beam, dtype, crval, cdelt, naxis, z, aplow, aphigh = axsplit[:9]

        # this is a hack for cropped spectra...
        #print "header naxis: %i, WAT naxis: %i" % (hdr['NAXIS1'], int(naxis))
        if hdr['NAXIS1'] != int(naxis):
            naxis = hdr['NAXIS1']
            crpix = hdr.get('CRPIX1')
            warn("Treating as cropped echelle spectrum.")
        else:
            crpix = 0

        if len(axsplit) > 9:
            functions = axsplit[9:]
            warn("Found but did not use functions %s" % str(functions))

        if int(dtype) == 0:
            xax = (float(crval) + float(cdelt) *
                   (np.arange(int(naxis)) + 1 - crpix)) / (1. + float(z))

        headerkws = {
            'CRPIX1': 1,
            'CRVAL1': crval,
            'CDELT1': cdelt,
            'NAXIS1': naxis,
            'NAXIS': 1,
            'REDSHIFT': z,
            'CTYPE1': 'wavelength',
            'CUNIT1': WAT1_dict['units']
        }
        cards = [pyfits.Card(k, v) for (k, v) in headerkws.iteritems()]
        header = pyfits.Header(cards)

        xarr = make_axis(xax, header)
        x_axes.append(xarr)

    return pyfits_hdu.data, pyfits_hdu.data * 0, units.EchelleAxes(x_axes), hdr
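
A standalone sketch of the WAT2 string parsing done in the loop above, using an invented "specN" value of the documented nine-field form (aperture, beam, dtype, crval, cdelt, naxis, z, aplow, aphigh):

axstring = '"1 113 0 6496.5 0.0244 2048 0. 23.22 31.27"'   # invented example value
axsplit = axstring.replace('"', '').split()
num, beam, dtype, crval, cdelt, naxis, z, aplow, aphigh = axsplit[:9]
print(int(num), int(dtype), float(crval), float(cdelt), int(naxis))
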
Example 9
def write_history(header, string):
    """
    Add a line to the header's history
    """
    if not isinstance(header, pyfits.Header):
        warn("header is not a header instance!")
    hdrstring = time.strftime("%m/%d/%y %H:%M:%S",
                              time.localtime()) + " " + string
    try:
        header.add_history(hdrstring)
    except AttributeError:
        print "WARNING: Error in history writing.  Could not add this string: %s" % hdrstring
Example 10
def make_axis(xarr,hdr,specname=None, wcstype='', specaxis="1", verbose=True,
        **kwargs):
    """
    Parse parameters from a .fits header into required SpectroscopicAxis
    parameters
    """

    #DEBUG if wcstype is not '': print "Loading file with WCSTYPE %s" % wcstype

    xunits = hdr.get('CUNIT%s%s' % (specaxis,wcstype))
    if hdr.get('ORIGIN') == 'CLASS-Grenoble' and xunits is None:
        # CLASS default
        xunits = 'Hz'
        
    # SDSS doesn't use FITS standard! Argh.    
    if hdr.get('TELESCOP') == 'SDSS 2.5-M':   
        xunits = 'angstrom' 

    # IRAF also doesn't use the same standard
    if xunits is None:
        if hdr.get('WAT1_001') is not None:
            pairs = hdr.get('WAT1_001').split()
            pdict = dict( [s.split("=") for s in pairs] )
            if 'units' in pdict:
                xunits = pdict['units']

    if hdr.get('REFFREQ'+wcstype):
        refX = hdr.get('REFFREQ'+wcstype)
    elif hdr.get('RESTFREQ'+wcstype):
        refX = hdr.get('RESTFREQ'+wcstype)
    elif hdr.get('RESTFRQ'+wcstype):
        refX = hdr.get('RESTFRQ'+wcstype)
    else:
        if verbose: warn( "Warning: No reference frequency found.  Velocity transformations will not be possible unless you set a reference frequency/wavelength" )
        refX = None

    if hdr.get('CTYPE%s%s' % (specaxis,wcstype)):
        xtype = hdr.get('CTYPE%s%s' % (specaxis,wcstype))
    else:
        xtype = 'VLSR'

    if hdr.get('VELDEF'):
        convention, frame = units.parse_veldef(hdr['VELDEF'])
        vframe = hdr.get('VFRAME') if hdr.get('VFRAME') is not None else 0.0
    else:
        convention, frame = _parse_velocity_convention(hdr.get('CTYPE%s%s' % (specaxis,wcstype))), None
        vframe = 0.0

    XAxis = units.SpectroscopicAxis(xarr, xunits, refX=refX,
            velocity_convention=convention, **kwargs)

    return XAxis
Example 11
    def line_ids_from_measurements(self, auto_yloc=True,
            auto_yloc_fraction=0.9, **kwargs):
        """
        Add line ID labels to a plot using lineid_plot
        http://oneau.wordpress.com/2011/10/01/line-id-plot/
        https://github.com/phn/lineid_plot
        http://packages.python.org/lineid_plot/

        Parameters
        ----------
        auto_yloc : bool
            If set, overrides box_loc and arrow_tip (the vertical position of
            the lineid labels) in kwargs to be `auto_yloc_fraction` of the plot
            range
        auto_yloc_fraction: float in range [0,1]
            The fraction of the plot (vertically) at which to place labels

        Examples
        --------
        >>> import numpy as np
        >>> import pyspeckit
        >>> sp = pyspeckit.Spectrum(
                xarr=pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,101),
                    units='km/s', refX=6562.8, refX_units='angstroms'),
                data=np.random.randn(101), error=np.ones(101))
        >>> sp.plotter()
        >>> sp.specfit(multifit=True, fittype='gaussian', guesses=[1,0,1]) # fitting noise....
        >>> sp.measure()
        >>> sp.plotter.line_ids_from_measurements()
        """
        import lineid_plot  

        if hasattr(self.Spectrum,'measurements'):
            measurements = self.Spectrum.measurements

            if auto_yloc:
                yr = self.axis.get_ylim()
                kwargs['box_loc'] = (yr[1]-yr[0])*auto_yloc_fraction + yr[0]
                kwargs['arrow_tip'] = (yr[1]-yr[0])*(auto_yloc_fraction*0.9) + yr[0]

            lineid_plot.plot_line_ids(self.Spectrum.xarr,
                    self.Spectrum.data, 
                    [v['pos'] for v in measurements.lines.values()],
                    measurements.lines.keys(),
                    ax=self.axis,
                    **kwargs)
        else:
            warn("Cannot add line IDs from measurements unless measurements have been made!")
Example 12
def read_echelle(pyfits_hdu):
    """
    Read an IRAF Echelle spectrum
    
    http://iraf.noao.edu/iraf/ftp/iraf/docs/specwcs.ps.Z
    """

    hdr = pyfits_hdu.header

    WAT1_dict, specaxdict = _get_WATS(hdr)

    x_axes = []

    for specnum, axstring in specaxdict.iteritems():
        axsplit = axstring.replace('"','').split()
        if specnum != int(axsplit[0]):
            raise ValueError("Mismatch in IRAF Echelle specification")
        num,beam,dtype,crval,cdelt,naxis,z,aplow,aphigh = axsplit[:9]
        
        # this is a hack for cropped spectra...
        #print "header naxis: %i, WAT naxis: %i" % (hdr['NAXIS1'], int(naxis))
        if hdr['NAXIS1'] != int(naxis):
            naxis = hdr['NAXIS1']
            crpix = hdr.get('CRPIX1')
            warn("Treating as cropped echelle spectrum.")
        else:
            crpix = 0

        if len(axsplit) > 9:
            functions = axsplit[9:]
            warn("Found but did not use functions %s" % str(functions))

        if int(dtype) == 0:
            xax = ( float(crval) + float(cdelt) * (np.arange(int(naxis)) + 1 - crpix) ) / (1.+float(z))

        headerkws = {'CRPIX1':1, 'CRVAL1':crval, 'CDELT1':cdelt,
                'NAXIS1':naxis, 'NAXIS':1, 'REDSHIFT':z,
                'CTYPE1':'wavelength', 'CUNIT1':WAT1_dict['units']}
        cards = [pyfits.Card(k,v) for (k,v) in headerkws.iteritems()]
        header = pyfits.Header(cards)

        xarr = make_axis(xax,header)
        x_axes.append(xarr)
    
    return pyfits_hdu.data, pyfits_hdu.data*0, units.EchelleAxes(x_axes), hdr
Example 13
    def __init__(self, speclist, xunits='GHz', **kwargs):
        print "Creating spectra"
        speclist = list(speclist)
        for ii, spec in enumerate(speclist):
            if type(spec) is str:
                spec = Spectrum(spec)
                speclist[ii] = spec

        self.speclist = speclist

        print "Concatenating data"
        self.xarr = units.SpectroscopicAxes(
            [sp.xarr.as_unit(xunits) for sp in speclist])
        self.xarr.units = xunits
        self.xarr.xtype = units.unit_type_dict[xunits]
        self.data = np.ma.concatenate([sp.data for sp in speclist])
        self.error = np.ma.concatenate([sp.error for sp in speclist])
        self._sort()

        self.header = pyfits.Header()
        for spec in speclist:
            for key, value in spec.header.items():
                try:
                    self.header[key] = value
                except (ValueError, KeyError):
                    warn("Could not update header KEY=%s to VALUE=%s" %
                         (key, value))

        self.plotter = plotters.Plotter(self)
        self._register_fitters()
        self.specfit = fitters.Specfit(self, Registry=self.Registry)
        self.baseline = baseline.Baseline(self)

        self.units = speclist[0].units
        for spec in speclist:
            if spec.units != self.units:
                raise ValueError("Mismatched units")

        # Special.  This needs to be modified to be more flexible; for now I need it to work for nh3
        self.plot_special = None
        self.plot_special_kwargs = {}
Example 14
    def __init__(self, speclist, xunits='GHz', **kwargs):
        print "Creating spectra"
        speclist = list(speclist)
        for ii,spec in enumerate(speclist):
            if type(spec) is str:
                spec = Spectrum(spec)
                speclist[ii] = spec

        self.speclist = speclist

        print "Concatenating data"
        self.xarr = units.SpectroscopicAxes([sp.xarr.as_unit(xunits) for sp in speclist])
        self.xarr.units = xunits 
        self.xarr.xtype = units.unit_type_dict[xunits]
        self.data = np.ma.concatenate([sp.data for sp in speclist])
        self.error = np.ma.concatenate([sp.error for sp in speclist])
        self._sort()

        self.header = pyfits.Header()
        for spec in speclist:
            for key,value in spec.header.items():
                try:
                    self.header[key] = value
                except (ValueError, KeyError):
                    warn("Could not update header KEY=%s to VALUE=%s" % (key,value))

        self.plotter = plotters.Plotter(self)
        self._register_fitters()
        self.specfit = fitters.Specfit(self,Registry=self.Registry)
        self.baseline = baseline.Baseline(self)
        
        self.unit = speclist[0].units
        for spec in speclist:
            if spec.unit != self.unit:
                raise ValueError("Mismatched units")

        # Special.  This needs to be modified to be more flexible; for now I need it to work for nh3
        self.plot_special = None
        self.plot_special_kwargs = {}
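
The concatenate-then-sort step above can be illustrated with plain numpy masked arrays; a sketch with two invented, overlapping segments standing in for the individual spectra:

import numpy as np

x1 = np.linspace(0.0, 1.0, 5)
d1 = np.ma.masked_invalid([1.0, 2.0, np.nan, 4.0, 5.0])
x2 = np.linspace(0.8, 2.0, 5)
d2 = np.ma.ones(5)

x = np.concatenate([x1, x2])
data = np.ma.concatenate([d1, d2])

order = np.argsort(x)            # roughly what a _sort() pass would do
x, data = x[order], data[order]
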
Example 15
def open_1d_txt(filename,
                xaxcol=0,
                datacol=1,
                errorcol=2,
                text_reader='simple',
                atpytype='ascii',
                **kwargs):
    """
    Attempt to read a 1D spectrum from a text file assuming wavelength as the
    first column, data as the second, and (optionally) error as the third.  

    Reading can be done either with atpy or a 'simple' reader.  If you have an
    IPAC, CDS, or formally formatted table, you'll want to use atpy.

    If you have a simply formatted file of the form, e.g.
    # name name
    # unit unit
    data data
    data data

    kwargs are passed to atpy.Table
    """
    if text_reader in ('simple', 'readcol') or not atpyOK:
        if not atpyOK:
            warn(
                "WARNING: atpy not installed; will use simple reader instead.")

        if text_reader == 'simple':
            data, error, XAxis, T = simple_txt(filename,
                                               xaxcol=xaxcol,
                                               datacol=datacol,
                                               errorcol=errorcol,
                                               **kwargs)
        elif text_reader == 'readcol':
            Tlist = readcol.readcol(filename, twod=False, **kwargs)
            XAxis = units.SpectroscopicAxis(Tlist[xaxcol])
            data = Tlist[datacol]
            error = Tlist[errorcol]

            T = dummy_class()
            Tdict = readcol.readcol(filename, asDict=True, **kwargs)

            T.data = dummy_class()
            T.data.dtype = dummy_class()
            T.data.dtype.names = hdr
            T.columns = {}
            T.columns[T.data.dtype.names[xaxcol]] = dummy_class()
            T.columns[T.data.dtype.names[xaxcol]].unit = colunits[xaxcol]
            T.columns[T.data.dtype.names[datacol]] = dummy_class()
            T.columns[T.data.dtype.names[datacol]].unit = colunits[datacol]

    elif text_reader in ('atpy', 'asciitable'):
        T = atpy.Table(filename, type=atpytype, masked=True, **kwargs)

        xarr = T.data[T.data.dtype.names[xaxcol]]
        data = T.data[T.data.dtype.names[datacol]]
        if len(T.columns) > errorcol:
            error = T.data[T.data.dtype.names[errorcol]]
        else:
            # assume uniform, zero error
            error = data * 0

        if 'xunits' in T.keywords:
            xunits = T.keywords['xunits']
        else:
            xunits = 'unknown'

        XAxis = units.SpectroscopicAxis(xarr, xunits)

    # Need this in Spectrum class to correctly parse header
    T.xaxcol = xaxcol
    T.datacol = datacol

    return data, error, XAxis, T
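
A sketch of the "simple" two-comment-row text format the docstring describes, written and read back with plain numpy rather than the simple_txt reader (filename and values are invented):

import numpy as np

with open('example_spectrum.txt', 'w') as f:
    f.write("# wavelength flux\n")       # name row
    f.write("# angstroms Jy\n")          # unit row
    f.write("6560.0 1.1\n6561.0 1.4\n6562.0 2.0\n")

wave, flux = np.loadtxt('example_spectrum.txt', unpack=True)
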
Example 16
"""
import numpy as np
from pyspeckit.mpfit import mpfit,mpfitException
from pyspeckit.spectrum.parinfo import ParinfoList,Parinfo
import copy
from astropy import log
import matplotlib.cbook as mpcb
from . import fitter
from . import mpfit_messages
from pyspeckit.specwarnings import warn
try:
    from collections import OrderedDict
except ImportError:
    try:
        from ordereddict import OrderedDict
    except ImportError:
        warn( "OrderedDict is required for modeling.  If you have python <2.7, install the ordereddict module." )

class SpectralModel(fitter.SimpleFitter):
    """
    A wrapper class for a spectra model.  Includes internal functions to
    generate multi-component models, annotations, integrals, and individual
    components.  The declaration can be complex, since you should name
    individual variables, set limits on them, set the units the fit will be
    performed in, and set the annotations to be used.  Check out some
    of the hyperfine codes (hcn, n2hp) for examples.
    """

    def __init__(self, modelfunc, npars, 
                 shortvarnames=("A","\\Delta x","\\sigma"),
                 fitunits=None,
                 centroid_par=None,
Example 17
    def lmfitter(self, xax, data, err=None, parinfo=None, quiet=True, debug=False, **kwargs):
        """
        Use lmfit instead of mpfit to do the fitting

        Parameters
        ----------
        xax : SpectroscopicAxis 
            The X-axis of the spectrum
        data : ndarray
            The data to fit
        err : ndarray (optional)
            The error on the data.  If unspecified, will be uniform unity
        parinfo : ParinfoList
            The guesses, parameter limits, etc.  See
            `pyspeckit.spectrum.parinfo` for details
        quiet : bool
            If false, print out some messages about the fitting

        """
        try:
            import lmfit
        except ImportError as e:
            raise ImportError( "Could not import lmfit, try using mpfit instead." )

        self.xax = xax # the 'stored' xax is just a link to the original
        if hasattr(xax,'convert_to_unit') and self.fitunits is not None:
            # some models will depend on the input units.  For these, pass in an X-axis in those units
            # (gaussian, voigt, lorentz profiles should not depend on units.  Ammonia, formaldehyde,
            # H-alpha, etc. should)
            xax = copy.copy(xax)
            xax.convert_to_unit(self.fitunits, quiet=quiet)
        elif self.fitunits is not None:
            raise TypeError("X axis does not have a convert method")

        if np.any(np.isnan(data)) or np.any(np.isinf(data)):
            err[np.isnan(data) + np.isinf(data)] = np.inf
            data[np.isnan(data) + np.isinf(data)] = 0
        if np.any(np.isnan(err)):
            raise ValueError("One or more of the error values is NaN."
                             "  This is not allowed.  Errors can be infinite "
                             "(which is equivalent to giving zero weight to "
                             "a data point), but otherwise they must be positive "
                             "floats.")
        elif np.any(err<0):
            raise ValueError("At least one error value is negative, which is "
                             "not allowed as negative errors are not "
                             "meaningful in the optimization process.")


        if parinfo is None:
            parinfo, kwargs = self._make_parinfo(debug=debug, **kwargs)
            log.debug("Parinfo created from _make_parinfo: {0}".format(parinfo))

        LMParams = parinfo.as_Parameters()
        log.debug("LMParams: "+"\n".join([repr(p) for p in list(LMParams.values())]))
        log.debug("parinfo:  {0}".format(parinfo))
        minimizer = lmfit.minimize(self.lmfitfun(xax,np.array(data),err,debug=debug),LMParams,**kwargs)
        if not quiet:
            log.info("There were %i function evaluations" % (minimizer.nfev))
        #modelpars = [p.value for p in parinfo.values()]
        #modelerrs = [p.stderr for p in parinfo.values() if p.stderr is not None else 0]

        self.LMParams = LMParams
        self.parinfo._from_Parameters(LMParams)
        log.debug("LMParams: {0}".format(LMParams))
        log.debug("parinfo: {0}".format(parinfo))

        self.mp = minimizer
        self.mpp = self.parinfo.values
        self.mpperr = self.parinfo.errors
        self.mppnames = self.parinfo.names
        modelkwargs = {}
        modelkwargs.update(self.modelfunc_kwargs)
        self.model = self.n_modelfunc(self.parinfo, **modelkwargs)(xax)
        if hasattr(minimizer,'chisqr'):
            chi2 = minimizer.chisqr
        else:
            try:
                chi2 = (((data-self.model)/err)**2).sum()
            except TypeError:
                chi2 = ((data-self.model)**2).sum()
        if np.isnan(chi2):
            warn( "Warning: chi^2 is nan" )
    
        if hasattr(self.mp,'ier') and self.mp.ier not in [1,2,3,4]:
            log.warning("Fitter failed: %s, %s" % (self.mp.message, self.mp.lmdif_message))

        return self.mpp,self.model,self.mpperr,chi2
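
For context, the lmfit.minimize call pattern used above looks like this on a toy Gaussian, independent of pyspeckit; a sketch assuming lmfit is installed (all names and values are invented):

import numpy as np
import lmfit

def residual(params, x, data):
    amp = params['amp'].value
    cen = params['cen'].value
    sig = params['sig'].value
    return data - amp * np.exp(-0.5 * ((x - cen) / sig)**2)

x = np.linspace(-5, 5, 200)
data = 3.0 * np.exp(-0.5 * (x / 0.7)**2) + np.random.normal(0, 0.1, x.size)

params = lmfit.Parameters()
params.add('amp', value=1.0, min=0)
params.add('cen', value=0.5)
params.add('sig', value=1.0, min=1e-3)

result = lmfit.minimize(residual, params, args=(x, data))
print(result.nfev, result.chisqr)
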
Example 18
import numpy as np
from pyspeckit.mpfit import mpfit, mpfitException
from pyspeckit.spectrum.parinfo import ParinfoList, Parinfo
import copy
from astropy import log
import matplotlib.cbook as mpcb
from . import fitter
from . import mpfit_messages
from pyspeckit.specwarnings import warn
try:
    from collections import OrderedDict
except ImportError:
    try:
        from ordereddict import OrderedDict
    except ImportError:
        warn(
            "OrderedDict is required for modeling.  If you have python <2.7, install the ordereddict module."
        )


class SpectralModel(fitter.SimpleFitter):
    """
    A wrapper class for a spectra model.  Includes internal functions to
    generate multi-component models, annotations, integrals, and individual
    components.  The declaration can be complex, since you should name
    individual variables, set limits on them, set the units the fit will be
    performed in, and set the annotations to be used.  Check out some
    of the hyperfine codes (hcn, n2hp) for examples.
    """
    def __init__(self,
                 modelfunc,
                 npars,
Example 19
    def lmfitter(self,
                 xax,
                 data,
                 err=None,
                 parinfo=None,
                 quiet=True,
                 debug=False,
                 **kwargs):
        """
        Use lmfit instead of mpfit to do the fitting

        Parameters
        ----------
        xax : SpectroscopicAxis 
            The X-axis of the spectrum
        data : ndarray
            The data to fit
        err : ndarray (optional)
            The error on the data.  If unspecified, will be uniform unity
        parinfo : ParinfoList
            The guesses, parameter limits, etc.  See
            `pyspeckit.spectrum.parinfo` for details
        quiet : bool
            If false, print out some messages about the fitting

        """
        try:
            import lmfit
        except ImportError as e:
            raise ImportError(
                "Could not import lmfit, try using mpfit instead.")

        self.xax = xax  # the 'stored' xax is just a link to the original
        if hasattr(xax, 'convert_to_unit') and self.fitunits is not None:
            # some models will depend on the input units.  For these, pass in an X-axis in those units
            # (gaussian, voigt, lorentz profiles should not depend on units.  Ammonia, formaldehyde,
            # H-alpha, etc. should)
            xax = copy.copy(xax)
            xax.convert_to_unit(self.fitunits, quiet=quiet)
        elif self.fitunits is not None:
            raise TypeError("X axis does not have a convert method")

        if np.any(np.isnan(data)) or np.any(np.isinf(data)):
            err[np.isnan(data) + np.isinf(data)] = np.inf
            data[np.isnan(data) + np.isinf(data)] = 0
        if np.any(np.isnan(err)):
            raise ValueError(
                "One or more of the error values is NaN."
                "  This is not allowed.  Errors can be infinite "
                "(which is equivalent to giving zero weight to "
                "a data point), but otherwise they must be positive "
                "floats.")
        elif np.any(err < 0):
            raise ValueError("At least one error value is negative, which is "
                             "not allowed as negative errors are not "
                             "meaningful in the optimization process.")

        if parinfo is None:
            parinfo, kwargs = self._make_parinfo(debug=debug, **kwargs)
            log.debug(
                "Parinfo created from _make_parinfo: {0}".format(parinfo))

        LMParams = parinfo.as_Parameters()
        log.debug("LMParams: " +
                  "\n".join([repr(p) for p in list(LMParams.values())]))
        log.debug("parinfo:  {0}".format(parinfo))
        minimizer = lmfit.minimize(
            self.lmfitfun(xax, np.array(data), err, debug=debug), LMParams,
            **kwargs)
        if not quiet:
            log.info("There were %i function evaluations" % (minimizer.nfev))
        #modelpars = [p.value for p in parinfo.values()]
        #modelerrs = [p.stderr for p in parinfo.values() if p.stderr is not None else 0]

        self.LMParams = LMParams
        self.parinfo._from_Parameters(LMParams)
        log.debug("LMParams: {0}".format(LMParams))
        log.debug("parinfo: {0}".format(parinfo))

        self.mp = minimizer
        self.mpp = self.parinfo.values
        self.mpperr = self.parinfo.errors
        self.mppnames = self.parinfo.names
        modelkwargs = {}
        modelkwargs.update(self.modelfunc_kwargs)
        self.model = self.n_modelfunc(self.parinfo, **modelkwargs)(xax)
        if hasattr(minimizer, 'chisqr'):
            chi2 = minimizer.chisqr
        else:
            try:
                chi2 = (((data - self.model) / err)**2).sum()
            except TypeError:
                chi2 = ((data - self.model)**2).sum()
        if np.isnan(chi2):
            warn("Warning: chi^2 is nan")

        if hasattr(self.mp, 'ier') and self.mp.ier not in [1, 2, 3, 4]:
            log.warning("Fitter failed: %s, %s" %
                        (self.mp.message, self.mp.lmdif_message))

        return self.mpp, self.model, self.mpperr, chi2
Example 20
    def __init__(
        self,
        filename=None,
        filetype=None,
        xarr=None,
        data=None,
        error=None,
        header=None,
        doplot=False,
        maskdata=True,
        plotkwargs={},
        xarrkwargs={},
        **kwargs
    ):
        """
        Create a Spectrum object.

        Must either pass in a filename or ALL of xarr, data, and header, plus
        optionally error.

        kwargs are passed to the file reader

        Parameters
        ----------
        filename : string or pyfits.HDU
            The file to read the spectrum from.  If data, xarr, and error are
            specified, leave filename blank.
        filetype : string
            Specify the file type (only needed if it cannot be automatically
            determined from the filename)
        xarr : `units.SpectroscopicAxis` or `np.ndarray`
            The X-axis of the data.  If it is an np.ndarray, you must pass
            `xarrkwargs` or a valid header if you want to use any of the unit
            functionality.
        data : `np.ndarray`
            The data array (must have same length as xarr)
        error : `np.ndarray` 
            The error array (must have same length as the data and xarr arrays)
        header : `pyfits.Header` or dict
            The header from which to read unit information.  Needs to be a
            `pyfits.Header` instance or another dictionary-like object with the
            appropriate information
        maskdata : boolean
            turn the array into a masked array with all nan and inf values masked
        doplot : boolean
            Plot the spectrum after loading it?
        plotkwargs : dict
            keyword arguments to pass to the plotter
        xarrkwargs : dict
            keyword arguments to pass to the SpectroscopicAxis initialization
            (can be used in place of a header)

        Examples
        --------

        >>> sp = pyspeckit.Spectrum(data=np.random.randn(100),
                    xarr=np.linspace(-50, 50, 100), error=np.ones(100)*0.1, 
                    xarrkwargs={'unit':'km/s', 'refX':4.829, 'refX_units':'GHz',
                        'xtype':'VLSR-RAD'}, header={})

        >>> xarr = pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,100),
                    units='km/s', refX=6562.83, refX_units='angstroms')
        >>> data = np.random.randn(100)*5 + np.random.rand(100)*100
        >>> err = np.sqrt(data/5.)*5. # Poisson noise
        >>> sp = pyspeckit.Spectrum(data=data, error=err, xarr=xarr, header={}) 
        
        >>> # if you already have a simple fits file
        >>> sp = pyspeckit.Spectrum('test.fits')
        """

        if filename:
            if filetype is None:
                suffix = filename.rsplit(".", 1)[1]
                if suffix in readers.suffix_types:
                    # use the default reader for that suffix
                    filetype = readers.suffix_types[suffix][0]
                    reader = readers.readers[filetype]
                else:
                    raise TypeError("File with suffix %s is not recognized." % suffix)
            else:
                if filetype in readers.readers:
                    reader = readers.readers[filetype]
                else:
                    raise TypeError("Filetype %s not recognized" % filetype)

            self.data, self.error, self.xarr, self.header = reader(filename, **kwargs)

            # these should probably be replaced with registerable functions...
            if filetype in ("fits", "tspec", "pyfits", "sdss"):
                self.parse_header(self.header)
            elif filetype == "txt":
                self.parse_text_header(self.header)
            elif filetype in ("hdf5", "h5"):
                self.parse_hdf5_header(self.header)

            if isinstance(filename, str):
                self.fileprefix = filename.rsplit(".", 1)[0]  # Everything prior to .fits or .txt
        elif xarr is not None and data is not None:
            # technically, this is unpythonic.  But I don't want to search for all 10 attributes required.
            if issubclass(type(xarr), units.SpectroscopicAxis):
                self.xarr = xarr
            else:
                self.xarr = units.SpectroscopicAxis(xarr, **xarrkwargs)
            self.data = data
            if error is not None:
                self.error = error
            else:
                self.error = data * 0
            if hasattr(header, "get"):
                self.header = header
            else:  # set as blank
                warn("WARNING: Blank header.")
                self.header = pyfits.Header()
            self.parse_header(self.header)

        if maskdata:
            if hasattr(self.data, "mask"):
                self.data.mask += np.isnan(self.data) + np.isinf(self.data)
                self.error.mask += np.isnan(self.data) + np.isinf(self.data)
            else:
                self.data = np.ma.masked_where(np.isnan(self.data) + np.isinf(self.data), self.data)
                self.error = np.ma.masked_where(np.isnan(self.data) + np.isinf(self.data), self.error)

        self.plotter = plotters.Plotter(self)
        self._register_fitters()
        self.specfit = fitters.Specfit(self, Registry=self.Registry)
        self.baseline = baseline.Baseline(self)
        self.speclines = speclines
        self._sort()

        # Special.  This needs to be modified to be more flexible; for now I need it to work for nh3
        self.plot_special = None
        self.plot_special_kwargs = {}

        if doplot:
            self.plotter(**plotkwargs)
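
The maskdata branch above reduces to masking NaN and Inf samples in both data and error; a standalone sketch:

import numpy as np

data = np.array([1.0, np.nan, 2.0, np.inf, 3.0])
error = np.ones_like(data)

bad = np.isnan(data) | np.isinf(data)    # equivalent to the boolean '+' used above
data = np.ma.masked_where(bad, data)
error = np.ma.masked_where(bad, error)
print(data)    # [1.0 -- 2.0 -- 3.0]
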
Example 21
    def reset_limits(self,
                     xmin=None,
                     xmax=None,
                     ymin=None,
                     ymax=None,
                     reset_xlimits=True,
                     reset_ylimits=True,
                     ypeakscale=1.2,
                     silent=None,
                     use_window_limits=False,
                     **kwargs):
        """
        Automatically or manually reset the plot limits
        """
        # if not use_window_limits: use_window_limits = False
        if self.debug:
            frame = inspect.currentframe()
            args, _, _, values = inspect.getargvalues(frame)
            print zip(args, values)

        if use_window_limits:
            # this means DO NOT reset!
            # it simply sets self.[xy][min/max] = current value
            self.set_limits_from_visible_window()
        else:
            if silent is not None:
                self.silent = silent

            # if self.xmin and self.xmax:
            if (reset_xlimits or self.Spectrum.xarr.min().value < self.xmin
                    or self.Spectrum.xarr.max().value > self.xmax):
                if not self.silent:
                    warn(
                        "Resetting X-axis min/max because the plot is out of bounds."
                    )
                self.xmin = None
                self.xmax = None
            if xmin is not None: self.xmin = u.Quantity(xmin, self._xunit)
            elif self.xmin is None:
                self.xmin = u.Quantity(self.Spectrum.xarr.min().value,
                                       self._xunit)
            if xmax is not None: self.xmax = u.Quantity(xmax, self._xunit)
            elif self.xmax is None:
                self.xmax = u.Quantity(self.Spectrum.xarr.max().value,
                                       self._xunit)

            xpixmin = np.argmin(
                np.abs(self.Spectrum.xarr.value - self.xmin.value))
            xpixmax = np.argmin(
                np.abs(self.Spectrum.xarr.value - self.xmax.value))
            if xpixmin > xpixmax: xpixmin, xpixmax = xpixmax, xpixmin
            elif xpixmin == xpixmax:
                if reset_xlimits:
                    raise Exception(
                        "Infinite recursion error.  Maybe there are no valid data?"
                    )
                if not self.silent:
                    warn(
                        "ERROR: the X axis limits specified were invalid.  Resetting."
                    )
                self.reset_limits(reset_xlimits=True,
                                  ymin=ymin,
                                  ymax=ymax,
                                  reset_ylimits=reset_ylimits,
                                  ypeakscale=ypeakscale,
                                  **kwargs)
                return

            if self.ymin and self.ymax:
                # this is utter nonsense....
                if (self.Spectrum.data.max() < self.ymin
                        or self.Spectrum.data.min() > self.ymax
                        or reset_ylimits):
                    if not self.silent and not reset_ylimits:
                        warn(
                            "Resetting Y-axis min/max because the plot is out of bounds."
                        )
                    self.ymin = None
                    self.ymax = None

            if ymin is not None: self.ymin = ymin
            elif self.ymin is None:
                if hasattr(self.Spectrum.data, 'mask'):
                    yminval = self.Spectrum.data[xpixmin:xpixmax].min()
                else:
                    yminval = np.nanmin(self.Spectrum.data[xpixmin:xpixmax])
                # Increase the range fractionally.  This means dividing a positive #, multiplying a negative #
                if yminval < 0:
                    self.ymin = float(yminval) * float(ypeakscale)
                else:
                    self.ymin = float(yminval) / float(ypeakscale)

            if ymax is not None:
                self.ymax = ymax
            elif self.ymax is None:
                if hasattr(self.Spectrum.data, 'mask'):
                    ymaxval = ((self.Spectrum.data[xpixmin:xpixmax]).max() -
                               self.ymin)
                else:
                    ymaxval = (np.nanmax(self.Spectrum.data[xpixmin:xpixmax]) -
                               self.ymin)
                if ymaxval > 0:
                    self.ymax = float(ymaxval) * float(ypeakscale) + self.ymin
                else:
                    self.ymax = float(ymaxval) / float(ypeakscale) + self.ymin

            self.ymin += self.offset
            self.ymax += self.offset

        self.axis.set_xlim(
            self.xmin.value if hasattr(self.xmin, 'value') else self.xmin,
            self.xmax.value if hasattr(self.xmax, 'value') else self.xmax)
        self.axis.set_ylim(self.ymin, self.ymax)
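
The ypeakscale logic above expands the displayed Y range fractionally; a small sketch of the lower-bound rule (divide a positive minimum, multiply a negative one):

def expand_ymin(yminval, ypeakscale=1.2):
    # A positive minimum moves toward zero; a negative minimum moves further negative.
    return yminval * ypeakscale if yminval < 0 else yminval / ypeakscale

print(expand_ymin(2.0))     # 1.666...
print(expand_ymin(-2.0))    # -2.4
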
Example 22
def open_1d_txt(filename, xaxcol=0, datacol=1, errorcol=2, 
        text_reader='simple', atpytype='ascii', **kwargs):
    """
    Attempt to read a 1D spectrum from a text file assuming wavelength as the
    first column, data as the second, and (optionally) error as the third.  

    Reading can be done either with atpy or a 'simple' reader.  If you have an
    IPAC, CDS, or formally formatted table, you'll want to use atpy.

    If you have a simply formatted file of the form, e.g.
    # name name
    # unit unit
    data data
    data data

    kwargs are passed to atpy.Table
    """
    if text_reader in ('simple','readcol') or not atpyOK:
        if not atpyOK:
            warn("WARNING: atpy not installed; will use simple reader instead.")
        
        if text_reader == 'simple':
            data, error, XAxis, T = simple_txt(filename, xaxcol = xaxcol, 
                datacol = datacol, errorcol = errorcol, **kwargs)
        elif text_reader == 'readcol':
            Tlist = readcol.readcol(filename, twod=False, **kwargs)
            XAxis = units.SpectroscopicAxis(Tlist[xaxcol])
            data = Tlist[datacol]
            error = Tlist[errorcol]

            T = dummy_class()
            Tdict = readcol.readcol(filename, asDict=True, **kwargs)

            T.data = dummy_class()
            T.data.dtype = dummy_class()
            T.data.dtype.names = hdr
            T.columns = {}
            T.columns[T.data.dtype.names[xaxcol]] = dummy_class()
            T.columns[T.data.dtype.names[xaxcol]].unit = colunits[xaxcol]
            T.columns[T.data.dtype.names[datacol]] = dummy_class()
            T.columns[T.data.dtype.names[datacol]].unit = colunits[datacol]

        
    elif text_reader in ('atpy','asciitable'):
        T = atpy.Table(filename, type=atpytype, masked=True, **kwargs)
        
        xarr = T.data[T.data.dtype.names[xaxcol]]
        data = T.data[T.data.dtype.names[datacol]]
        if len(T.columns) > errorcol:
            error = T.data[T.data.dtype.names[errorcol]]
        else:
            # assume uniform, zero error
            error = data*0 

        if 'xunits' in T.keywords:
            xunits = T.keywords['xunits']
        else:
            xunits = 'unknown'

        XAxis = units.SpectroscopicAxis(xarr,xunits)
    
    # Need this in Spectrum class to correctly parse header    
    T.xaxcol = xaxcol
    T.datacol = datacol 

    return data, error, XAxis, T
Example 23
def open_1d_pyfits(pyfits_hdu,
                   specnum=0,
                   wcstype='',
                   specaxis="1",
                   errspecnum=None,
                   autofix=True,
                   scale_keyword=None,
                   scale_action=operator.div,
                   verbose=False,
                   apnum=0,
                   **kwargs):
    """
    This is open_1d_fits but for a pyfits_hdu so you don't necessarily have to
    open a fits file
    """

    # force things that will be treated as strings to be strings
    # this is primarily to avoid problems with variables being passed as unicode
    wcstype = str(wcstype)
    specaxis = str(specaxis)

    hdr = pyfits_hdu._header
    if autofix:
        for card in hdr.cards:
            try:
                if verbose: card.verify('fix')
                else: card.verify('silentfix')
            except pyfits.VerifyError:
                hdr.__delitem__(card.key)
    data = pyfits_hdu.data

    # search for the correct axis (may be 1 or 3, unlikely to be 2 or others)
    # 1 = 1D spectrum
    # 3 = "3D" spectrum with a single x,y point (e.g., JCMT smurf/makecube)
    if hdr.get('NAXIS') > 1:
        for ii in xrange(1, hdr.get('NAXIS') + 1):
            ctype = hdr.get('CTYPE%i' % ii)
            if ctype in units.xtype_dict:
                specaxis = "%i" % ii

    if hdr.get('NAXIS') == 2:
        if hdr.get('WAT0_001') is not None:
            if 'multispec' in hdr.get('WAT0_001'):
                # treat as an Echelle spectrum from  IRAF
                warn("""
This looks like an Echelle spectrum.   You may want to load it
using pyspeckit.wrappers.load_IRAF_multispec.  The file will still
be successfully read if you continue, but the plotting and fitting packages
will run into errors.""")
                return read_echelle(pyfits_hdu)

        if isinstance(specnum, list):
            # allow averaging of multiple spectra (this should be modified
            # - each spectrum should be a Spectrum instance)
            spec = ma.array(data[specnum, :]).mean(axis=0)
        elif isinstance(specnum, int):
            spec = ma.array(data[specnum, :]).squeeze()
        else:
            raise TypeError(
                "Specnum is of wrong type (not a list of integers or an integer)."
                + "  Type: %s" % str(type(specnum)))
        if errspecnum is not None:
            # SDSS supplies weights, not errors.
            if hdr.get('TELESCOP') == 'SDSS 2.5-M':
                errspec = 1. / np.sqrt(ma.array(data[errspecnum, :]).squeeze())
            else:
                errspec = ma.array(data[errspecnum, :]).squeeze()
        else:
            errspec = spec * 0  # set error spectrum to zero if it's not in the data

    elif hdr.get('NAXIS') > 2:
        if hdr.get('BANDID2'):
            # this is an IRAF .ms.fits file with a 'background' in the 3rd dimension
            spec = ma.array(data[specnum, apnum, :]).squeeze()
        else:
            for ii in xrange(3, hdr.get('NAXIS') + 1):
                # only fail if extra axes have more than one row
                if hdr.get('NAXIS%i' % ii) > 1:
                    raise ValueError("Too many axes for open_1d_fits")
            spec = ma.array(data).squeeze()
        if errspecnum is None:
            errspec = spec * 0  # set error spectrum to zero if it's not in the data
    else:
        spec = ma.array(data).squeeze()
        if errspecnum is None:
            errspec = spec * 0  # set error spectrum to zero if it's not in the data

    if scale_keyword is not None:
        try:
            print "Found SCALE keyword %s.  Using %s to scale it" % (
                scale_keyword, scale_action)
            scaleval = hdr[scale_keyword]
            spec = scale_action(spec, scaleval)
            errspec = scale_action(errspec, scaleval)
        except (ValueError, KeyError) as e:
            pass

    xarr = None
    if hdr.get('ORIGIN') == 'CLASS-Grenoble':
        # Use the CLASS FITS definition (which is non-standard)
        # http://iram.fr/IRAMFR/GILDAS/doc/html/class-html/node84.html
        # F(n) = RESTFREQ + CRVALi + ( n - CRPIXi ) * CDELTi
        if verbose: print "Loading a CLASS .fits spectrum"
        dv = -1 * hdr.get('CDELT1')
        if hdr.get('RESTFREQ'):
            v0 = hdr.get('RESTFREQ') + hdr.get('CRVAL1')
        elif hdr.get('RESTF'):
            v0 = hdr.get('RESTF') + hdr.get('CRVAL1')
        else:
            warn("CLASS file does not have RESTF or RESTFREQ")
        p3 = hdr.get('CRPIX1')
    elif hdr.get(str('CD%s_%s%s' % (specaxis, specaxis, wcstype))):
        dv = hdr['CD%s_%s%s' % (specaxis, specaxis, wcstype)]
        v0 = hdr['CRVAL%s%s' % (specaxis, wcstype)]
        p3 = hdr['CRPIX%s%s' % (specaxis, wcstype)]
        hdr['CDELT%s' % specaxis] = dv
        if verbose:
            print "Using the FITS CD matrix.  PIX=%f VAL=%f DELT=%f" % (p3, v0,
                                                                        dv)
    elif hdr.get(str('CDELT%s%s' % (specaxis, wcstype))):
        dv = hdr['CDELT%s%s' % (specaxis, wcstype)]
        v0 = hdr['CRVAL%s%s' % (specaxis, wcstype)]
        p3 = hdr['CRPIX%s%s' % (specaxis, wcstype)]
        if verbose:
            print "Using the FITS CDELT value.  PIX=%f VAL=%f DELT=%f" % (
                p3, v0, dv)
    elif len(data.shape) > 1:
        if verbose:
            print "No CDELT or CD in header.  Assuming 2D input with 1st line representing the spectral axis."
        # try assuming first axis is X axis
        if hdr.get('CUNIT%s%s' % (specaxis, wcstype)):
            xarr = data[0, :]
            spec = data[1, :]
            if data.shape[0] > 2:
                errspec = data[2, :]
        else:
            raise TypeError(
                "Don't know what type of FITS file you've input; " +
                "its header is not FITS compliant and it doesn't look like it "
                + "was written by pyspeckit.")

    # Deal with logarithmic wavelength binning if necessary
    if xarr is None:
        if hdr.get('WFITTYPE') == 'LOG-LINEAR':
            xconv = lambda v: 10**((v - p3 + 1) * dv + v0)
            xarr = xconv(np.arange(len(spec)))
        else:
            xconv = lambda v: ((v - p3 + 1) * dv + v0)
            xarr = xconv(np.arange(len(spec)))

    # need to do something with this...
    restfreq = hdr.get('RESTFREQ')
    if restfreq is None: restfreq = hdr.get('RESTFRQ')

    XAxis = make_axis(xarr, hdr, wcstype=wcstype, specaxis=specaxis, **kwargs)

    return spec, errspec, XAxis, hdr
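
The linear pixel-to-world conversion used near the end of open_1d_pyfits can be checked in isolation; a sketch with an invented CRPIX1/CRVAL1/CDELT1 triple:

import numpy as np

p3, v0, dv = 1.0, 1.42e9, 1.0e5     # invented CRPIX1, CRVAL1 (Hz), CDELT1 (Hz)
npix = 8

xconv = lambda v: (v - p3 + 1) * dv + v0    # same form as above
xarr = xconv(np.arange(npix))
print(xarr[0], xarr[-1])    # 1420000000.0 1420700000.0
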
Example 24
    def reset_limits(self,xmin=None, xmax=None, ymin=None, ymax=None,
            reset_xlimits=True, reset_ylimits=True, ypeakscale=1.2,
            silent=None, use_window_limits=False, **kwargs):
        """
        Automatically or manually reset the plot limits
        """
        if self.debug:
            frame = inspect.currentframe()
            args, _, _, values = inspect.getargvalues(frame)
            print zip(args,values)

        if use_window_limits:
            # this means DO NOT reset!
            # it simply sets self.[xy][min/max] = current value
            self.set_limits_from_visible_window()
        else:
            if silent is not None:
                self.silent = silent

            if (self.Spectrum.xarr.max() < self.xmin or self.Spectrum.xarr.min() > self.xmax 
                    or reset_xlimits):
                if not self.silent: warn( "Resetting X-axis min/max because the plot is out of bounds." )
                self.xmin = None
                self.xmax = None
            if xmin is not None: self.xmin = xmin
            elif self.xmin is None: self.xmin=self.Spectrum.xarr.min()
            if xmax is not None: self.xmax = xmax
            elif self.xmax is None: self.xmax=self.Spectrum.xarr.max()

            xpixmin = np.argmin(np.abs(self.Spectrum.xarr-self.xmin))
            xpixmax = np.argmin(np.abs(self.Spectrum.xarr-self.xmax))
            if xpixmin>xpixmax: xpixmin,xpixmax = xpixmax,xpixmin
            elif xpixmin == xpixmax:
                if not self.silent: warn( "ERROR: the X axis limits specified were invalid.  Resetting." )
                self.reset_limits(reset_xlimits=True, ymin=ymin, ymax=ymax,
                                  reset_ylimits=reset_ylimits,
                                  ypeakscale=ypeakscale, **kwargs)
                return
            
            if (self.Spectrum.data.max() < self.ymin or self.Spectrum.data.min() > self.ymax
                    or reset_ylimits):
                if not self.silent and not reset_ylimits: warn( "Resetting Y-axis min/max because the plot is out of bounds." )
                self.ymin = None
                self.ymax = None

            if ymin is not None: self.ymin = ymin
            elif self.ymin is None:
                if hasattr(self.Spectrum.data, 'mask'):
                    yminval = self.Spectrum.data[xpixmin:xpixmax].min()
                else:
                    yminval = np.nanmin(self.Spectrum.data[xpixmin:xpixmax])
                # Increase the range fractionally.  This means dividing a positive #, multiplying a negative #
                if yminval < 0:
                    self.ymin = float(yminval)*float(ypeakscale)
                else:
                    self.ymin = float(yminval)/float(ypeakscale)

            if ymax is not None: self.ymax = ymax
            elif self.ymax is None:
                if hasattr(self.Spectrum.data, 'mask'):
                    ymaxval = ((self.Spectrum.data[xpixmin:xpixmax]).max()-self.ymin)
                else:
                    ymaxval = (np.nanmax(self.Spectrum.data[xpixmin:xpixmax])-self.ymin)
                if ymaxval > 0:
                    self.ymax = float(ymaxval) * float(ypeakscale) + self.ymin
                else:
                    self.ymax = float(ymaxval) / float(ypeakscale) + self.ymin

            self.ymin += self.offset
            self.ymax += self.offset

        self.axis.set_xlim(self.xmin,self.xmax)
        self.axis.set_ylim(self.ymin,self.ymax)
Example 25
    def reset_limits(self,xmin=None, xmax=None, ymin=None, ymax=None,
            reset_xlimits=True, reset_ylimits=True, ypeakscale=1.2,
            silent=None, use_window_limits=False, **kwargs):
        """
        Automatically or manually reset the plot limits
        """
        if self.debug:
            frame = inspect.currentframe()
            args, _, _, values = inspect.getargvalues(frame)
            print zip(args,values)

        if use_window_limits:
            # this means DO NOT reset!
            # it simply sets self.[xy][min/max] = current value
            self.set_limits_from_visible_window()
        else:
            if silent is not None:
                self.silent = silent

            if (self.Spectrum.xarr.max() < self.xmin or self.Spectrum.xarr.min() > self.xmax 
                    or reset_xlimits):
                if not self.silent: warn( "Resetting X-axis min/max because the plot is out of bounds." )
                self.xmin = None
                self.xmax = None
            if xmin is not None: self.xmin = xmin
            elif self.xmin is None: self.xmin=self.Spectrum.xarr.min()
            if xmax is not None: self.xmax = xmax
            elif self.xmax is None: self.xmax=self.Spectrum.xarr.max()

            xpixmin = np.argmin(np.abs(self.Spectrum.xarr-self.xmin))
            xpixmax = np.argmin(np.abs(self.Spectrum.xarr-self.xmax))
            if xpixmin>xpixmax: xpixmin,xpixmax = xpixmax,xpixmin
            elif xpixmin == xpixmax:
                if not self.silent: warn( "ERROR: the X axis limits specified were invalid.  Resetting." )
                self.reset_limits(reset_xlimits=True, ymin=ymin, ymax=ymax,
                                  reset_ylimits=reset_ylimits,
                                  ypeakscale=ypeakscale, **kwargs)
                return
            
            if (self.Spectrum.data.max() < self.ymin or self.Spectrum.data.min() > self.ymax
                    or reset_ylimits):
                if not self.silent and not reset_ylimits: warn( "Resetting Y-axis min/max because the plot is out of bounds." )
                self.ymin = None
                self.ymax = None

            if ymin is not None: self.ymin = ymin
            elif self.ymin is None:
                try:
                    self.ymin = np.nanmin(self.Spectrum.data[xpixmin:xpixmax])*ypeakscale + 0.0
                except TypeError:
                    # this is assumed to be a Masked Array error
                    self.ymin = self.Spectrum.data[xpixmin:xpixmax].min()*ypeakscale + 0.0
            if ymax is not None: self.ymax = ymax
            elif self.ymax is None:
                try:
                    self.ymax=(np.nanmax(self.Spectrum.data[xpixmin:xpixmax])-self.ymin) * ypeakscale + self.ymin
                except TypeError:
                    self.ymax=((self.Spectrum.data[xpixmin:xpixmax]).max()-self.ymin) * ypeakscale + self.ymin

            self.ymin += self.offset
            self.ymax += self.offset

        self.axis.set_xlim(self.xmin,self.xmax)
        self.axis.set_ylim(self.ymin,self.ymax)
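
# Illustrative sketch (not from pyspeckit's source): typical use of reset_limits
# on a plotted Spectrum.  'example.fits' is a placeholder filename.
import pyspeckit

sp = pyspeckit.Spectrum('example.fits')
sp.plotter()                               # initial plot with automatic limits
sp.plotter.reset_limits(xmin=10, xmax=50)  # manual x-range; y-range is re-derived
sp.plotter.reset_limits(reset_ylimits=True, ypeakscale=1.5)  # re-pad y more generously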
Example no. 26
0
    def __init__(self,
                 filename=None,
                 filetype=None,
                 xarr=None,
                 data=None,
                 error=None,
                 header=None,
                 doplot=False,
                 maskdata=True,
                 plotkwargs={},
                 xarrkwargs={},
                 **kwargs):
        """
        Create a Spectrum object.

        Must either pass in a filename or both xarr and data.  The error and
        header arguments are optional; a blank header is created if none is
        given.

        kwargs are passed to the file reader

        Parameters
        ----------
        filename : string
            The file to read the spectrum from.  If data, xarr, and error are
            specified, leave filename blank.
        filetype : string
            Specify the file type (only needed if it cannot be automatically
            determined from the filename)
        xarr : `units.SpectroscopicAxis` or `np.ndarray`
            The X-axis of the data.  If it is an np.ndarray, you must pass
            `xarrkwargs` or a valid header if you want to use any of the unit
            functionality.
        data : `np.ndarray`
            The data array (must have same length as xarr)
        error : `np.ndarray` 
            The error array (must have same length as the data and xarr arrays)
        header : `pyfits.Header` or dict
            The header from which to read unit information.  Needs to be a
            `pyfits.Header` instance or another dictionary-like object with the
            appropriate information
        maskdata : boolean
            turn the array into a masked array with all nan and inf values masked
        doplot : boolean
            Plot the spectrum after loading it?
        plotkwargs : dict
            keyword arguments to pass to the plotter
        xarrkwargs : dict
            keyword arguments to pass to the SpectroscopicAxis initialization
            (can be used in place of a header)

        Examples
        --------

        >>> sp = pyspeckit.Spectrum(data=np.random.randn(100),
                    xarr=np.linspace(-50, 50, 100), error=np.ones(100)*0.1, 
                    xarrkwargs={'unit':'km/s', 'refX':4.829, 'refX_units':'GHz',
                        'xtype':'VLSR-RAD'}, header={})

        >>> xarr = pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,100),
                    units='km/s', refX=6562.83, refX_units='angstroms')
        >>> data = np.random.randn(100)*5 + np.random.rand(100)*100
        >>> err = np.sqrt(data/5.)*5. # Poisson noise
        >>> sp = pyspeckit.Spectrum(data=data, error=err, xarr=xarr, header={}) 
        
        >>> # if you already have a simple fits file
        >>> sp = pyspeckit.Spectrum('test.fits')
        """

        if filename:
            if filetype is None:
                suffix = filename.rsplit('.', 1)[1]
                if suffix in readers.suffix_types:
                    # use the default reader for that suffix
                    filetype = readers.suffix_types[suffix][0]
                    reader = readers.readers[filetype]
                else:
                    raise TypeError("File with suffix %s is not recognized." %
                                    suffix)
            else:
                if filetype in readers.readers:
                    reader = readers.readers[filetype]
                else:
                    raise TypeError("Filetype %s not recognized" % filetype)

            self.data, self.error, self.xarr, self.header = reader(
                filename, **kwargs)

            # these should probably be replaced with registerable functions...
            if filetype in ('fits', 'tspec', 'pyfits', 'sdss'):
                self.parse_header(self.header)
            elif filetype == 'txt':
                self.parse_text_header(self.header)
            elif filetype in ('hdf5', 'h5'):
                self.parse_hdf5_header(self.header)

            if isinstance(filename, str):
                self.fileprefix = filename.rsplit(
                    '.', 1)[0]  # Everything prior to .fits or .txt
        elif xarr is not None and data is not None:
            # technically, this is unpythonic.  But I don't want to search for all 10 attributes required.
            if issubclass(type(xarr), units.SpectroscopicAxis):
                self.xarr = xarr
            else:
                self.xarr = units.SpectroscopicAxis(xarr, **xarrkwargs)
            self.data = data
            if error is not None:
                self.error = error
            else:
                self.error = data * 0
            if hasattr(header, 'get'):
                self.header = header
            else:  # set as blank
                warn("WARNING: Blank header.")
                self.header = pyfits.Header()
            self.parse_header(self.header)

        if maskdata:
            if hasattr(self.data, 'mask'):
                self.data.mask += np.isnan(self.data) + np.isinf(self.data)
                self.error.mask += np.isnan(self.data) + np.isinf(self.data)
            else:
                self.data = np.ma.masked_where(
                    np.isnan(self.data) + np.isinf(self.data), self.data)
                self.error = np.ma.masked_where(
                    np.isnan(self.data) + np.isinf(self.data), self.error)

        self.plotter = plotters.Plotter(self)
        self._register_fitters()
        self.specfit = fitters.Specfit(self, Registry=self.Registry)
        self.baseline = baseline.Baseline(self)
        self.speclines = speclines
        self._sort()

        # Special.  This needs to be modified to be more flexible; for now I need it to work for nh3
        self.plot_special = None
        self.plot_special_kwargs = {}

        if doplot: self.plotter(**plotkwargs)
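
# Illustrative sketch (not from pyspeckit's source), following the docstring
# examples above: with maskdata=True (the default) NaN/inf samples end up masked.
import numpy as np
import pyspeckit

data = np.random.randn(100)
data[10] = np.nan                      # a bad channel
sp = pyspeckit.Spectrum(data=data,
                        xarr=np.linspace(-50, 50, 100),
                        error=np.ones(100) * 0.1,
                        xarrkwargs={'unit': 'km/s', 'refX': 4.829,
                                    'refX_units': 'GHz', 'xtype': 'VLSR-RAD'},
                        header={})
print(sp.data.mask[10])                # True: the NaN sample is masked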
Example no. 27
0
    def lmfitter(self, xax, data, err=None, parinfo=None, quiet=True, debug=False, **kwargs):
        """
        Use lmfit instead of mpfit to do the fitting

        Parameters
        ----------
        xax : SpectroscopicAxis 
            The X-axis of the spectrum
        data : ndarray
            The data to fit
        err : ndarray (optional)
            The error on the data.  If unspecified, will be uniform unity
        parinfo : ParinfoList
            The guesses, parameter limits, etc.  See
            `pyspeckit.spectrum.parinfo` for details
        quiet : bool
            If False, print out some messages about the fitting.

        """
        try:
            import lmfit
        except ImportError as e:
            raise ImportError( "Could not import lmfit, try using mpfit instead." )

        self.xax = xax # the 'stored' xax is just a link to the original
        if hasattr(xax,'convert_to_unit') and self.fitunits is not None:
            # some models will depend on the input units.  For these, pass in an X-axis in those units
            # (gaussian, voigt, lorentz profiles should not depend on units.  Ammonia, formaldehyde,
            # H-alpha, etc. should)
            xax = copy.copy(xax)
            xax.convert_to_unit(self.fitunits, quiet=quiet)
        elif self.fitunits is not None:
            raise TypeError("X axis does not have a convert method")

        if err is None:
            # the docstring promises uniform unity errors when none are given
            err = np.ones(data.shape)
        if np.any(np.isnan(data)) or np.any(np.isinf(data)):
            err[np.isnan(data) + np.isinf(data)] = np.inf
            data[np.isnan(data) + np.isinf(data)] = 0

        if parinfo is None:
            parinfo, kwargs = self._make_parinfo(debug=debug, **kwargs)
            if debug:
                print parinfo

        LMParams = parinfo.as_Parameters()
        if debug:
            print "LMParams: ","\n".join([repr(p) for p in LMParams.values()])
            print "parinfo:  ",parinfo
        minimizer = lmfit.minimize(self.lmfitfun(xax,np.array(data),err,debug=debug),LMParams,**kwargs)
        if not quiet:
            print "There were %i function evaluations" % (minimizer.nfev)
        #modelpars = [p.value for p in parinfo.values()]
        #modelerrs = [p.stderr for p in parinfo.values() if p.stderr is not None else 0]

        self.LMParams = LMParams
        self.parinfo._from_Parameters(LMParams)
        if debug:
            print LMParams
            print parinfo

        self.mp = minimizer
        self.mpp = self.parinfo.values
        self.mpperr = self.parinfo.errors
        self.mppnames = self.parinfo.names
        modelkwargs = {}
        modelkwargs.update(self.modelfunc_kwargs)
        self.model = self.n_modelfunc(self.parinfo, **modelkwargs)(xax)
        if hasattr(minimizer,'chisqr'):
            chi2 = minimizer.chisqr
        else:
            try:
                chi2 = (((data-self.model)/err)**2).sum()
            except TypeError:
                chi2 = ((data-self.model)**2).sum()
        if np.isnan(chi2):
            warn( "Warning: chi^2 is nan" )
    
        if hasattr(self.mp,'ier') and self.mp.ier not in [1,2,3,4]:
            print "Fitter failed: %s, %s" % (self.mp.message, self.mp.lmdif_message)

        return self.mpp,self.model,self.mpperr,chi2
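
# Illustrative sketch (not from pyspeckit's source): the bare lmfit pattern that
# lmfitter wraps -- a residual function plus a Parameters object handed to
# lmfit.minimize.  The Gaussian model and parameter names here are arbitrary.
import numpy as np
import lmfit

def residual(params, x, data, err):
    amp = params['amp'].value
    cen = params['center'].value
    wid = params['width'].value
    model = amp * np.exp(-(x - cen)**2 / (2 * wid**2))
    return (data - model) / err

x = np.linspace(-10, 10, 200)
data = 3 * np.exp(-x**2 / 2) + np.random.randn(200) * 0.1
err = np.full(200, 0.1)

params = lmfit.Parameters()            # analogous to parinfo.as_Parameters()
params.add('amp', value=1, min=0)
params.add('center', value=0)
params.add('width', value=2, min=0)

result = lmfit.minimize(residual, params, args=(x, data, err))
print(result.nfev, result.chisqr)      # function evaluations and chi^2, as above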
Example no. 28
0
def make_axis(xarr,
              hdr,
              specname=None,
              wcstype='',
              specaxis="1",
              verbose=True,
              **kwargs):
    """
    Parse parameters from a .fits header into required SpectroscopicAxis
    parameters
    """

    #DEBUG if wcstype is not '': print "Loading file with WCSTYPE %s" % wcstype

    xunits = hdr.get('CUNIT%s%s' % (specaxis, wcstype))
    if hdr.get('ORIGIN') == 'CLASS-Grenoble' and xunits is None:
        # CLASS default
        xunits = 'Hz'

    # SDSS doesn't use FITS standard! Argh.
    if hdr.get('TELESCOP') == 'SDSS 2.5-M':
        xunits = 'angstroms'

    # IRAF also doesn't use the same standard
    if xunits is None:
        if hdr.get('WAT1_001') is not None:
            pairs = hdr.get('WAT1_001').split()
            pdict = dict([s.split("=") for s in pairs])
            if 'units' in pdict:
                xunits = pdict['units']

    if hdr.get('REFFREQ' + wcstype):
        refX = hdr.get('REFFREQ' + wcstype)
    elif hdr.get('RESTFREQ' + wcstype):
        refX = hdr.get('RESTFREQ' + wcstype)
    elif hdr.get('RESTFRQ' + wcstype):
        refX = hdr.get('RESTFRQ' + wcstype)
    else:
        if verbose:
            warn(
                "Warning: No reference frequency found.  Velocity transformations will not be possible unless you set a reference frequency/wavelength"
            )
        refX = None

    if hdr.get('CTYPE%s%s' % (specaxis, wcstype)):
        xtype = hdr.get('CTYPE%s%s' % (specaxis, wcstype))
    else:
        xtype = 'VLSR'

    if hdr.get('VELDEF'):
        convention, frame = units.parse_veldef(hdr['VELDEF'])
        vframe = hdr.get('VFRAME') if hdr.get('VFRAME') is not None else 0.0
    else:
        convention, frame = None, None
        vframe = 0.0

    XAxis = units.SpectroscopicAxis(xarr,
                                    xunits,
                                    xtype=xtype,
                                    refX=refX,
                                    velocity_convention=convention,
                                    frame=frame,
                                    frame_offset=vframe,
                                    **kwargs)

    return XAxis
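
# Illustrative sketch (not from pyspeckit's source): calling make_axis with a
# minimal astropy.io.fits header.  The card values are arbitrary examples, and
# whether 'FREQ' maps to a known xtype depends on units.xtype_dict.
import numpy as np
from astropy.io import fits

hdr = fits.Header()
hdr['CUNIT1'] = 'Hz'                # x-axis unit
hdr['CTYPE1'] = 'FREQ'              # x-axis type
hdr['RESTFRQ'] = 1.420405751786e9   # reference frequency, enables velocity conversion

xarr = np.linspace(1.4200e9, 1.4208e9, 512)
xaxis = make_axis(xarr, hdr)        # -> units.SpectroscopicAxis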
Example no. 29
0
def open_1d_pyfits(pyfits_hdu, specnum=0, wcstype='', specaxis="1",
        errspecnum=None, autofix=True, scale_keyword=None,
        scale_action=operator.div, verbose=False, apnum=0, **kwargs):
    """
    This is open_1d_fits but for a pyfits_hdu so you don't necessarily have to
    open a fits file
    """

    # force things that will be treated as strings to be strings
    # this is primarily to avoid problems with variables being passed as unicode
    wcstype = str(wcstype)
    specaxis = str(specaxis)

    hdr = pyfits_hdu._header
    if autofix: 
        for card in hdr.cards:
            try:
                if verbose: card.verify('fix')
                else: card.verify('silentfix')
            except pyfits.VerifyError:
                hdr.__delitem__(card.key)
    data = pyfits_hdu.data

    # search for the correct axis (may be 1 or 3, unlikely to be 2 or others)
    # 1 = 1D spectrum
    # 3 = "3D" spectrum with a single x,y point (e.g., JCMT smurf/makecube)
    if hdr.get('NAXIS') > 1:
        for ii in xrange(1,hdr.get('NAXIS')+1):
            ctype = hdr.get('CTYPE%i'%ii)
            if ctype in units.xtype_dict:
                specaxis="%i" % ii

    if hdr.get('NAXIS') == 2:
        if hdr.get('WAT0_001') is not None:
            if 'multispec' in hdr.get('WAT0_001'):
                # treat as an Echelle spectrum from  IRAF
                warn("""
This looks like an Echelle spectrum.   You may want to load it
using pyspeckit.wrappers.load_IRAF_multispec.  The file will still
be successfully read if you continue, but the plotting and fitting packages
will run into errors.""")
                return read_echelle(pyfits_hdu)

        if isinstance(specnum,list):
            # allow averaging of multiple spectra (this should be modified
            # - each spectrum should be a Spectrum instance)
            spec = ma.array(data[specnum,:]).mean(axis=0)
        elif isinstance(specnum,int):
            spec = ma.array(data[specnum,:]).squeeze()
        else:
            raise TypeError(
                "Specnum is of wrong type (not a list of integers or an integer)." +
                "  Type: %s" %
                str(type(specnum)))
        if errspecnum is not None:
            # SDSS supplies weights, not errors.    
            if hdr.get('TELESCOP') == 'SDSS 2.5-M':
                errspec = 1. / np.sqrt(ma.array(data[errspecnum,:]).squeeze())
            else:       
                errspec = ma.array(data[errspecnum,:]).squeeze()
        else:
            errspec = spec*0 # set error spectrum to zero if it's not in the data

    elif hdr.get('NAXIS') > 2:
        if hdr.get('BANDID2'):
            # this is an IRAF .ms.fits file with a 'background' in the 3rd dimension
            spec = ma.array(data[specnum,apnum,:]).squeeze()
        else:
            for ii in xrange(3,hdr.get('NAXIS')+1):
                # only fail if extra axes have more than one row
                if hdr.get('NAXIS%i' % ii) > 1:
                    raise ValueError("Too many axes for open_1d_fits")
            spec = ma.array(data).squeeze()
        if errspecnum is None: 
            errspec = spec*0 # set error spectrum to zero if it's not in the data
    else:
        spec = ma.array(data).squeeze()
        if errspecnum is None: errspec = spec*0 # set error spectrum to zero if it's not in the data

    if scale_keyword is not None:
        try:
            print "Found SCALE keyword %s.  Using %s to scale it" % (scale_keyword,scale_action)
            scaleval = hdr[scale_keyword]
            spec = scale_action(spec,scaleval)
            errspec = scale_action(errspec,scaleval)
        except (ValueError, KeyError) as e:
            pass

    xarr = None
    if hdr.get('ORIGIN') == 'CLASS-Grenoble':
        # Use the CLASS FITS definition (which is non-standard)
        # http://iram.fr/IRAMFR/GILDAS/doc/html/class-html/node84.html
        # F(n) = RESTFREQ + CRVALi + ( n - CRPIXi ) * CDELTi
        if verbose: print "Loading a CLASS .fits spectrum"
        dv = -1*hdr.get('CDELT1')
        if hdr.get('RESTFREQ'):
            v0 = hdr.get('RESTFREQ') + hdr.get('CRVAL1')
        elif hdr.get('RESTF'):
            v0 = hdr.get('RESTF') + hdr.get('CRVAL1')
        else:
            warn("CLASS file does not have RESTF or RESTFREQ")
        p3 = hdr.get('CRPIX1')
    elif hdr.get(str('CD%s_%s%s' % (specaxis,specaxis,wcstype))):
        dv = hdr['CD%s_%s%s' % (specaxis,specaxis,wcstype)]
        v0 = hdr['CRVAL%s%s' % (specaxis,wcstype)]
        p3 = hdr['CRPIX%s%s' % (specaxis,wcstype)]
        hdr['CDELT%s' % specaxis] = dv
        if verbose: print "Using the FITS CD matrix.  PIX=%f VAL=%f DELT=%f" % (p3,v0,dv)
    elif hdr.get(str('CDELT%s%s' % (specaxis,wcstype))):
        dv = hdr['CDELT%s%s' % (specaxis,wcstype)]
        v0 = hdr['CRVAL%s%s' % (specaxis,wcstype)]
        p3 = hdr['CRPIX%s%s' % (specaxis,wcstype)]
        if verbose: print "Using the FITS CDELT value.  PIX=%f VAL=%f DELT=%f" % (p3,v0,dv)
    elif len(data.shape) > 1:
        if verbose: print "No CDELT or CD in header.  Assuming 2D input with 1st line representing the spectral axis."
        # try assuming first axis is X axis
        if hdr.get('CUNIT%s%s' % (specaxis,wcstype)):
            xarr = data[0,:]
            spec = data[1,:]
            if data.shape[0] > 2:
                errspec = data[2,:]
        else:
            raise TypeError("Don't know what type of FITS file you've input; "+
                "its header is not FITS compliant and it doesn't look like it "+
                "was written by pyspeckit.")

    # Deal with logarithmic wavelength binning if necessary
    if xarr is None:
        if hdr.get('WFITTYPE') == 'LOG-LINEAR':
            xconv = lambda v: 10**((v-p3+1)*dv+v0)
            xarr = xconv(np.arange(len(spec)))
        else:
            xconv = lambda v: ((v-p3+1)*dv+v0)
            xarr = xconv(np.arange(len(spec)))
    
    # need to do something with this...
    restfreq = hdr.get('RESTFREQ')
    if restfreq is None: restfreq= hdr.get('RESTFRQ')

    XAxis = make_axis(xarr,hdr,wcstype=wcstype,specaxis=specaxis,**kwargs)

    return spec,errspec,XAxis,hdr
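
# Illustrative sketch (not from pyspeckit's source): handing an already-opened
# HDU to open_1d_pyfits.  'spectrum.fits' is a placeholder filename.
from astropy.io import fits

hdu = fits.open('spectrum.fits')[0]
spec, errspec, xaxis, hdr = open_1d_pyfits(hdu, specnum=0, errspecnum=None)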
Example no. 30
0
    @units.setter  # restored: the matching 'units' property getter is not shown in this fragment
    def units(self, value):
        warn("'units' is deprecated; please use 'unit'")
        self._unit = value
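
# Illustrative sketch: the deprecated spelling still works but warns; the warning
# suggests the 'unit' property as the supported replacement (assuming `xarr` is a
# SpectroscopicAxis instance with a settable 'unit' property).
xarr.units = 'GHz'   # emits: "'units' is deprecated; please use 'unit'"
xarr.unit = 'GHz'    # preferred spelling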