Code example #1
def test_template_match_inverse_variance():
    """
    Test template_match with InverseVariance uncertainties when both observed and template spectra share the same wavelength axis.
    """
    # Seed np.random so that results are consistent
    np.random.seed(42)

    # Create test spectra
    spec_axis = np.linspace(0, 50, 50) * u.AA
    spec = Spectrum1D(spectral_axis=spec_axis,
                      flux=np.random.randn(50) * u.Jy,
                      uncertainty=InverseVariance(1 / np.random.sample(50)**2,
                                                  unit='1 / Jy2'))

    spec1 = Spectrum1D(spectral_axis=spec_axis,
                       flux=np.random.randn(50) * u.Jy,
                       uncertainty=InverseVariance(1 / np.random.sample(50)**2,
                                                   unit='1 / Jy2'))

    # Get result from template_match
    tm_result = template_comparison.template_match(spec, spec1)

    # Create new spectrum for comparison
    spec_result = Spectrum1D(
        spectral_axis=spec_axis,
        flux=spec1.flux *
        template_comparison._normalize_for_template_matching(spec, spec1))

    assert quantity_allclose(tm_result[0].flux,
                             spec_result.flux,
                             atol=0.01 * u.Jy)
    np.testing.assert_almost_equal(tm_result[3], 40093.28353756253)
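
As a point of reference for the examples that follow, InverseVariance stores 1/sigma**2 per pixel, so it is usually built from a 1-sigma error array. A minimal, self-contained sketch (the values below are made up and are not taken from the test above):

import numpy as np
import astropy.units as u
from astropy.nddata import InverseVariance
from specutils import Spectrum1D

# Hypothetical per-pixel 1-sigma flux errors in Jy
sigma = np.full(50, 0.1)

# InverseVariance holds 1 / sigma**2; its unit is the inverse square of the flux unit
ivar = InverseVariance(1 / sigma**2, unit='1 / Jy2')

spec = Spectrum1D(spectral_axis=np.linspace(1, 50, 50) * u.AA,
                  flux=np.ones(50) * u.Jy,
                  uncertainty=ivar)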
Code example #2
def load_sdss_boss(path, hdu=1, **kwargs):
    r"""
    Read a spectrum from a path that is described by the SDSS BOSS 'spec' data model
    https://data.SDSS.org/datamodel/files/BOSS_SPECTRO_REDUX/RUN2D/spectra/PLATE4/spec.html

    :param path:
        The local path of the spectrum.

    :returns:
        A `specutils.Spectrum1D` object.
    """
    units = u.Unit("1e-17 erg / (Angstrom cm2 s)")
    
    with fits.open(path, **kwargs) as image:
    
        spectral_axis = 10**image[hdu].data["loglam"] * u.Angstrom

        flux = np.atleast_2d(image[hdu].data["flux"]) * units
        uncertainty = InverseVariance(image[hdu].data["ivar"].reshape(flux.shape))

        meta = OrderedDict([
            ("header", image[0].header),
            ("hdu_headers", [hdu.header for hdu in image]),
            ("bitmask", dict(and_mask=image[hdu].data["and_mask"].reshape(flux.shape), 
                             or_mask=image[hdu].data["or_mask"].reshape(flux.shape))),
            ("wavelength", image[hdu].data["wdisp"]),
            ("model", image[hdu].data["model"].reshape(flux.shape)),
        ])

    return Spectrum1D(spectral_axis=spectral_axis, flux=flux, uncertainty=uncertainty, meta=meta)
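
A usage sketch for the loader above; the filename is hypothetical:

import numpy as np

# Hypothetical local file following the BOSS 'spec' data model
spec = load_sdss_boss("spec-3586-55181-0001.fits")

print(spec.spectral_axis[:3])   # wavelengths in Angstrom
print(spec.flux.shape)          # (1, n_pixels) because of np.atleast_2d

# The stored uncertainty is inverse variance, so 1-sigma errors are 1/sqrt(ivar)
sigma = 1 / np.sqrt(spec.uncertainty.array)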
Code example #3
def load_sdss_mastar(path, hdu=1, **kwargs):
    r"""
    Read a list of spectra from a path that is described by the SDSS MaNGA MaStar data model,
    which actually describes a collection of spectra of different sources:
    https://data.sdss.org/datamodel/files/MANGA_SPECTRO_MASTAR/DRPVER/MPROCVER/mastar-goodspec-DRPVER-MPROCVER.html
    
    :param path:
        The local path of the spectrum.

    :returns:
        A `specutils.SpectrumList` of `specutils.Spectrum1D` objects.
    """
    spectra = []

    units = u.Unit("1e-17 erg / (Angstrom cm2 s)")

    with fits.open(path, **kwargs) as image:

        _hdu = image[hdu]
        for i in range(_hdu.header["NAXIS2"]):

            meta = OrderedDict(zip(_hdu.data.dtype.names, _hdu.data[i]))

            spectral_axis = meta.pop("WAVE") * u.Angstrom
            flux = np.atleast_2d(meta.pop("FLUX") * units)
            uncertainty = InverseVariance(meta.pop("IVAR").reshape(flux.shape))

            spectra.append(Spectrum1D(spectral_axis=spectral_axis, 
                                      flux=flux, uncertainty=uncertainty, meta=meta))

    return SpectrumList(spectra)
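
Because the MaStar loader returns a SpectrumList, each element is an independent Spectrum1D. A brief usage sketch (the filename is hypothetical, and MANGAID is assumed to be one of the per-row columns carried into meta):

spectra = load_sdss_mastar("mastar-goodspec.fits")

for spectrum in spectra[:5]:
    print(spectrum.meta.get("MANGAID"), spectrum.flux.shape)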
Code example #4
def load_sdss_apstar(path, **kwargs):
    r"""
    Read a spectrum from a path that is described by the SDSS apStar data model
    https://data.SDSS.org/datamodel/files/APOGEE_REDUX/APRED_VERS/APSTAR_VERS/TELESCOPE/LOCATION_ID/apStar.html

    :param path:
        The local path of the spectrum.

    :returns:
        A `specutils.Spectrum1D` object.
    """
    units = u.Unit("1e-17 erg / (Angstrom cm2 s)")


    with fits.open(path, **kwargs) as image:
        # Build spectral axis ourselves because specutils does not handle
        # log-linear transformations yet.
        spectral_axis = _wcs_log_linear(image[1].header)

        data_slice = kwargs.get("data_slice", None)
        if data_slice is None:
            slicer = np.atleast_2d
        else:
            slicer = lambda _: np.atleast_2d(_)[data_slice]
        
        flux = slicer(image[1].data) * units
        uncertainty = InverseVariance(slicer(image[2].data)**-2)

        verbosity = kwargs.get("verbosity", 1)

        snr = [image[0].header["SNR"]]
        n_visits = image[0].header["NVISITS"]
        if n_visits > 1:
            snr.append(snr[0])
            snr.extend([image[0].header[f"SNRVIS{i}"] for i in range(1, 1 + n_visits)])

        if data_slice is not None:
            snr = snr[data_slice]

        meta = OrderedDict([
            ("header", image[0].header),
            ("bitmask", slicer(image[3].data)),
            ("snr", snr)
        ])
        if verbosity >= 1:
            meta["hdu_headers"] = [hdu.header for hdu in image]

        if verbosity >= 2:
            meta.update(OrderedDict([
                ("sky_flux", slicer(image[4].data) * units),
                ("sky_error", slicer(image[5].data) * units),
                ("telluric_flux", slicer(image[6].data) * units),
                ("telluric_error", slicer(image[7].data) * units),
                ("lsf_coefficients", slicer(image[8].data)),
                ("rv_ccf_structure", slicer(image[9].data)),
            ]))

    return Spectrum1D(spectral_axis=spectral_axis, flux=flux, uncertainty=uncertainty, meta=meta)
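
The `_wcs_log_linear` helper used above is not shown here. A minimal sketch of what it might look like, assuming the apStar header stores a log10-linear wavelength solution in CRVAL1, CDELT1 and NAXIS1:

import numpy as np
import astropy.units as u

def _wcs_log_linear(header):
    # Assumed convention: pixel i maps to log10(wavelength / Angstrom)
    # = CRVAL1 + CDELT1 * i, i.e. the grid is uniform in log-wavelength.
    index = np.arange(header["NAXIS1"])
    return 10**(header["CRVAL1"] + header["CDELT1"] * index) * u.Angstrom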
Code example #5
def _load_manga_spectra(hdulist, per_unit=None, transpose=None):
    """ Return a MaNGA Spectrum1D object

    Returns a Spectrum1D object for a MaNGA data file.  Set the
    `transpose` kwarg to True for MaNGA cubes, as they are flipped relative
    to what Spectrum1D expects.  Use the `per_unit` kwarg to indicate the
    "spaxel" or "fiber" unit for cubes and rss files, respectively.

    Parameters
    ----------
    hdulist : fits.HDUList
        A MaNGA read astropy fits HDUList
    per_unit : astropy.units.Unit
        An astropy unit to divide the default flux unit by
    transpose : bool
        If True, transpose the data arrays

    Returns
    -------
    Spectrum1D
        The spectrum contained in the file.
    """
    unit = u.Unit('1e-17 erg / (Angstrom cm2 s)')
    if per_unit:
        unit = unit / per_unit

    hdr = hdulist['PRIMARY'].header
    wave = hdulist['WAVE'].data * u.angstrom

    if transpose:
        flux = hdulist['FLUX'].data.T * unit
        ivar = InverseVariance(hdulist["IVAR"].data.T)
        # SDSS masks are arrays of bit values storing multiple boolean conditions.
        # Setting non-zero bit values to True to map to specutils standard
        mask = hdulist['MASK'].data.T != 0
    else:
        flux = hdulist['FLUX'].data * unit
        ivar = InverseVariance(hdulist["IVAR"].data)
        mask = hdulist['MASK'].data != 0

    return Spectrum1D(flux=flux,
                      meta={'header': hdr},
                      spectral_axis=wave,
                      uncertainty=ivar,
                      mask=mask)
Code example #6
def spPlate_loader(file_obj, limit=None, **kwargs):
    """
    Loader for SDSS spPlate files, reading flux spectra from all fibres into a single array.

    Parameters
    ----------
    file_obj: str, file-like, or HDUList
           FITS file name, object (provided from name by Astropy I/O Registry),
           or HDUList (as resulting from astropy.io.fits.open()).

    limit : :class:`int`, optional
        If set, only return the first `limit` spectra in `flux` array.

    Returns
    -------
    Spectrum1D
        The spectra represented by the wavelength solution from the header WCS
        and the data array of the primary HDU (typically 640 along dimension 1).
    """
    if isinstance(file_obj, fits.hdu.hdulist.HDUList):
        hdulist = file_obj
    elif fits.util.fileobj_closed(file_obj):
        hdulist = fits.open(file_obj.name, **kwargs)
    else:
        hdulist = fits.open(file_obj, **kwargs)

    header = hdulist[0].header
    meta = {'header': header}
    wcs = WCS(header).dropaxis(1)
    if limit is None:
        limit = header['NAXIS2']

    bunit = header.get('BUNIT', '1e-17 erg / (Angstrom cm2 s)')
    if 'Ang' in bunit and 'strom' not in bunit:
        bunit = bunit.replace('Ang', 'Angstrom')
    flux_unit = Unit(bunit)
    flux = hdulist[0].data[0:limit, :] * flux_unit
    uncertainty = InverseVariance(hdulist[1].data[0:limit, :] / flux_unit**2)

    # dispersion along NAXIS1 from the WCS
    dispersion = wcs.pixel_to_world(np.arange(flux.shape[-1]))
    # convert out of logspace (default for spSpec/spPlate spectra)?
    if header.get('DC-Flag', 1) == 1:
        dispersion = 10**dispersion
    dispersion_unit = Unit('Angstrom')

    mask = hdulist[2].data[0:limit, :] != 0
    meta['plugmap'] = Table.read(hdulist[5])[0:limit]

    if not isinstance(file_obj, fits.hdu.hdulist.HDUList):
        hdulist.close()

    return Spectrum1D(flux=flux, spectral_axis=dispersion*dispersion_unit,
                      uncertainty=uncertainty, meta=meta, mask=mask)
Code example #7
def load_sdss_apvisit(path, **kwargs):
    r"""
    Read a spectrum from a path that is described by the SDSS apVisit data model
    https://data.SDSS.org/datamodel/files/APOGEE_REDUX/APRED_VERS/TELESCOPE/PLATE_ID/MJD5/apVisit.html

    :param path:
        The local path of the spectrum.

    :returns:
        A `specutils.Spectrum1D` object.
    """
    units = u.Unit("1e-17 erg / (Angstrom cm2 s)")

    with fits.open(path, **kwargs) as image:

        # Flatten the per-chip arrays into a single 1D array, reversing the stored order
        order_and_shape = lambda A: A.flatten()[::-1]

        spectral_axis = order_and_shape(image[4].data) * u.Angstrom
        flux = order_and_shape(image[1].data) * units
        uncertainty = InverseVariance(order_and_shape(image[2].data)**-2)

        # Collect common metadata from the primary header
        meta = OrderedDict([
            ("header", image[0].header),
            ("snr", image[0].header["snr"]),
        ])
        verbosity = kwargs.get("verbosity", 1)
        if verbosity >= 1:
            meta["hdu_headers"] = [hdu.header for hdu in image]

        meta.update(OrderedDict([
            ("bitmask", order_and_shape(image[3].data)),
        ]))

        if verbosity >= 2:
            meta.update(OrderedDict([
                ("sky_flux", order_and_shape(image[5].data) * units),
                ("sky_error", order_and_shape(image[6].data) * units),
                ("telluric_flux", order_and_shape(image[7].data) * units),
                ("telluric_error", order_and_shape(image[8].data) * units),
                ("wavelength_coefficients", image[9].data),
                ("lsf_coefficients", image[10].data)
            ]))

        spectrum = Spectrum1D(
            spectral_axis=spectral_axis,
            flux=flux, 
            uncertainty=uncertainty, 
            meta=meta
        )

    return spectrum
Code example #8
    def test_inverse_variance_uncert(self):
        ccd = self.ccd
        uncert = InverseVariance(0.01 * np.ones(self.shape),
                                 unit=self.unit + '-2')
        ccd.uncertainty = uncert

        f = _extract_ccddata(ccd)
        assert_equal(f['data'], 100 * np.ones(self.shape))
        assert_equal(f['mask'], self.mask)
        assert_equal(f['unit'], self.unit)
        assert_equal(f['meta'], self.meta)
        assert_equal(f['uncertainty'], 10 * np.ones(self.shape))
Code example #9
File: sdss.py  Project: rosteen/specutils
def spPlate_loader(file_obj, limit=None, **kwargs):
    """
    Loader for SDSS spPlate files, reading flux spectra from all fibres into a single array.

    Parameters
    ----------
    file_obj: str, file-like, or HDUList
           FITS file name, object (provided from name by Astropy I/O Registry),
           or HDUList (as resulting from astropy.io.fits.open()).

    limit : :class:`int`, optional
        If set, only return the first `limit` spectra in `flux` array.

    Returns
    -------
    Spectrum1D
        The spectra represented by the wavelength solution from the header WCS
        and the data array of the primary HDU (typically 640 along dimension 1).
    """
    with read_fileobj_or_hdulist(file_obj, **kwargs) as hdulist:
        header = hdulist[0].header
        meta = {'header': header}
        wcs = WCS(header).dropaxis(1)
        if limit is None:
            limit = header['NAXIS2']

        bunit = header.get('BUNIT', '1e-17 erg / (Angstrom cm2 s)')
        if 'Ang' in bunit and 'strom' not in bunit:
            bunit = bunit.replace('Ang', 'Angstrom')
        flux_unit = Unit(bunit)
        flux = hdulist[0].data[0:limit, :] * flux_unit
        uncertainty = InverseVariance(hdulist[1].data[0:limit, :] /
                                      flux_unit**2)

        # Fix the WCS if it is claimed to be linear
        if header.get('DC-Flag', 1) == 1:
            fixed_wcs = _sdss_wcs_to_log_wcs(wcs)
        else:
            fixed_wcs = wcs

        mask = hdulist[2].data[0:limit, :] != 0
        meta['plugmap'] = Table.read(hdulist[5])[0:limit]

    return Spectrum1D(flux=flux,
                      wcs=fixed_wcs,
                      uncertainty=uncertainty,
                      meta=meta,
                      mask=mask)
Code example #10
File: test_resample.py  Project: robelgeda/specutils
def test_same_grid_fluxconserving(simulated_spectra):
    """
    Test that feeding in the original dispersion axis returns the
    same flux after resampling.
    """
    input_spectra = simulated_spectra.s1_um_mJy_e1
    input_spectra.uncertainty = InverseVariance(
        [0.5] * len(simulated_spectra.s1_um_mJy_e1.flux))

    inst = FluxConservingResampler()
    results = inst(input_spectra, simulated_spectra.s1_um_mJy_e1.spectral_axis)

    assert np.allclose(np.array(simulated_spectra.s1_um_mJy_e1.flux),
                       np.array(results.flux))
    assert np.allclose(input_spectra.uncertainty.array,
                       results.uncertainty.array)
Code example #11
def spec_loader(file_obj, **kwargs):
    """
    Loader for SDSS-III/IV optical spectrum "spec" files.

    Parameters
    ----------
    file_obj: str, file-like, or HDUList
          FITS file name, object (provided from name by Astropy I/O Registry),
          or HDUList (as resulting from astropy.io.fits.open()).

    Returns
    -------
    data: Spectrum1D
        The spectrum that is represented by the 'loglam' (wavelength) and 'flux'
        data columns in the BINTABLE extension of the FITS `file_obj`.
    """
    if isinstance(file_obj, fits.hdu.hdulist.HDUList):
        hdulist = file_obj
    elif fits.util.fileobj_closed(file_obj):
        hdulist = fits.open(file_obj.name, **kwargs)
    else:
        hdulist = fits.open(file_obj, **kwargs)

    header = hdulist[0].header
    name = header.get('NAME')
    meta = {'header': header}

    bunit = header.get('BUNIT', '1e-17 erg / (Angstrom cm2 s)')
    if 'Ang' in bunit and 'strom' not in bunit:
        bunit = bunit.replace('Ang', 'Angstrom')
    flux_unit = Unit(bunit)

    # spectrum is in HDU 1
    flux = hdulist[1].data['flux'] * flux_unit

    uncertainty = InverseVariance(hdulist[1].data['ivar'] / flux_unit**2)

    dispersion = 10**hdulist[1].data['loglam']
    dispersion_unit = Unit('Angstrom')

    mask = hdulist[1].data['and_mask'] != 0

    if not isinstance(file_obj, fits.hdu.hdulist.HDUList):
        hdulist.close()

    return Spectrum1D(flux=flux, spectral_axis=dispersion * dispersion_unit,
                      uncertainty=uncertainty, meta=meta, mask=mask)
Code example #12
File: test_smoothing.py  Project: aragilar/specutils
def test_smooth_custom_kernel_uncertainty(simulated_spectra):
    """
    Test CustomKernel smoothing with correct parameters.
    """

    np.random.seed(42)

    # Create a custom kernel (some weird asymmetric-ness)
    numpy_kernel = np.array([0.5, 1, 2, 0.5, 0.2])
    numpy_kernel = numpy_kernel / np.sum(numpy_kernel)
    custom_kernel = convolution.CustomKernel(numpy_kernel)

    spec1 = simulated_spectra.s1_um_mJy_e1
    uncertainty = np.abs(np.random.random(spec1.flux.shape))

    # Test StdDevUncertainty
    spec1.uncertainty = StdDevUncertainty(uncertainty)

    spec1_smoothed = convolution_smooth(spec1, custom_kernel)
    tt = convolution.convolve(1 / (spec1.uncertainty.array**2), custom_kernel)
    uncertainty_smoothed_astropy = 1 / np.sqrt(tt)

    assert np.allclose(spec1_smoothed.uncertainty.array,
                       uncertainty_smoothed_astropy)

    # Test VarianceUncertainty
    spec1.uncertainty = VarianceUncertainty(uncertainty)

    spec1_smoothed = convolution_smooth(spec1, custom_kernel)
    uncertainty_smoothed_astropy = 1 / convolution.convolve(
        1 / spec1.uncertainty.array, custom_kernel)
    assert np.allclose(spec1_smoothed.uncertainty.array,
                       uncertainty_smoothed_astropy)

    # Test InverseVariance
    spec1.uncertainty = InverseVariance(uncertainty)

    spec1_smoothed = convolution_smooth(spec1, custom_kernel)
    uncertainty_smoothed_astropy = convolution.convolve(
        spec1.uncertainty.array, custom_kernel)
    assert np.allclose(spec1_smoothed.uncertainty.array,
                       uncertainty_smoothed_astropy)
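
The test above pins down how convolution_smooth propagates each uncertainty type; for InverseVariance input, the smoothed uncertainty is the kernel-convolved inverse variance. A short usage sketch with illustrative values:

import numpy as np
import astropy.units as u
from astropy.convolution import Gaussian1DKernel
from astropy.nddata import InverseVariance
from specutils import Spectrum1D
from specutils.manipulation import convolution_smooth

spec = Spectrum1D(spectral_axis=np.linspace(1, 2, 100) * u.um,
                  flux=np.random.randn(100) * u.mJy,
                  uncertainty=InverseVariance(np.full(100, 4.0)))

smoothed = convolution_smooth(spec, Gaussian1DKernel(2))
print(smoothed.uncertainty.array[:5])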
Code example #13
def _load_manga_spectra(hdulist, per_unit=None):
    """ Return a MaNGA Spectrum1D object

    Returns a Spectrum1D object for a MaNGA data file. Use the `per_unit`
    kwarg to indicate the "spaxel" or "fiber" unit for cubes and rss files,
    respectively. Note that the spectral axis will automatically be moved to
    be last during Spectrum1D initialization.

    Parameters
    ----------
    hdulist : fits.HDUList
        A MaNGA read astropy fits HDUList
    per_unit : astropy.units.Unit
        An astropy unit to divide the default flux unit by

    Returns
    -------
    Spectrum1D
        The spectrum contained in the file.
    """
    unit = u.Unit('1e-17 erg / (Angstrom cm2 s)')
    if per_unit:
        unit = unit / per_unit

    hdr = hdulist['PRIMARY'].header
    wcs = WCS(hdulist['FLUX'].header)

    flux = hdulist['FLUX'].data * unit
    ivar = InverseVariance(hdulist["IVAR"].data)
    # SDSS masks are arrays of bit values storing multiple boolean conditions.
    mask = hdulist['MASK'].data != 0

    return Spectrum1D(flux=flux,
                      meta={'header': hdr},
                      wcs=wcs,
                      uncertainty=ivar,
                      mask=mask)
Code example #14
File: specutils.py  Project: desihub/prospect
def read_spPlate(filename, limit=None):
    """Read a SDSS spPlate file.

    Parameters
    ----------
    filename : :class:`str`
        Name of the spPlate file.
    limit : :class:`int`, optional
        If set, only return the first `limit` spectra.

    Returns
    -------
    Spectrum1D
        The spectra.
    """
    with fits.open(filename) as hdulist:
        header = hdulist[0].header
        meta = {'header': header}
        try:
            flux_unit = u.Unit(hdulist[0].header['BUNIT'])
        except ValueError:
            flux_unit = u.Unit('1e-17 erg / (Angstrom cm2 s)')
        if limit is None:
            limit = header['NAXIS2']
        flux = hdulist[0].data[0:limit, :] * flux_unit
        wcs = WCS(header)
        dispersion_unit = u.Unit('Angstrom')
        dispersion = 10**wcs.all_pix2world(np.vstack((np.arange(flux.shape[1]),
                                                      np.zeros((flux.shape[1],)))).T,
                                           0)[:, 0]
        uncertainty = InverseVariance(hdulist[1].data[0:limit, :])
        mask = hdulist[2].data[0:limit, :] != 0
        meta['plugmap'] = Table.read(hdulist[5])[0:limit]

    return Spectrum1D(flux=flux, spectral_axis=dispersion*dispersion_unit,
                      uncertainty=uncertainty, meta=meta, mask=mask)
Code example #15
    def resample1d(self, orig_spectrum, fin_spec_axis):
        """
        Create a re-sampling matrix to be used in re-sampling spectra in a way
        that conserves flux. If an uncertainty is present in the input spectra
        it will be propagated through to the final resampled output spectra
        as an InverseVariance uncertainty.

        Parameters
        ----------
        orig_spectrum : `~specutils.Spectrum1D`
            The original 1D spectrum.
        fin_spec_axis :  Quantity
            The desired spectral axis array.

        Returns
        -------
        resample_spectrum : `~specutils.Spectrum1D`
            An output spectrum containing the resampled `~specutils.Spectrum1D`
        """

        # Check if units on original spectrum and new wavelength (if defined)
        # match
        if isinstance(fin_spec_axis, Quantity):
            if orig_spectrum.spectral_axis.unit != fin_spec_axis.unit:
                raise ValueError(
                    "Original spectrum spectral axis grid and new"
                    "spectral axis grid must have the same units.")

        if not isinstance(fin_spec_axis, SpectralAxis):
            fin_spec_axis = SpectralAxis(fin_spec_axis)

        # todo: Would be good to return uncertainty in type it was provided?
        # todo: add in weighting options

        # Get provided uncertainty into variance
        if orig_spectrum.uncertainty is not None:
            if isinstance(orig_spectrum.uncertainty, StdDevUncertainty):
                pixel_uncer = np.square(orig_spectrum.uncertainty.array)
            elif isinstance(orig_spectrum.uncertainty, VarianceUncertainty):
                pixel_uncer = orig_spectrum.uncertainty.array
            elif isinstance(orig_spectrum.uncertainty, InverseVariance):
                pixel_uncer = np.reciprocal(orig_spectrum.uncertainty.array)
        else:
            pixel_uncer = None

        orig_axis_in_fin = orig_spectrum.spectral_axis.to(fin_spec_axis.unit)
        resample_grid = self._resample_matrix(orig_axis_in_fin, fin_spec_axis)

        # Now for some broadcasting magic to handle multi dimensional flux inputs
        # Essentially this part is inserting length one dimensions as fillers
        # For example, if we have a (5,6,10) input flux, and an output grid
        # of 3, flux will be broadcast to (5,6,1,10) and resample_grid will
        # Be broadcast to (1,1,3,10).  The sum then reduces down the 10, the
        # original dispersion grid, leaving 3, the new dispersion grid, as
        # the last index.
        new_flux_shape = list(orig_spectrum.flux.shape)
        new_flux_shape.insert(-1, 1)
        in_flux = orig_spectrum.flux.reshape(new_flux_shape)

        ones = [1] * len(orig_spectrum.flux.shape[:-1])
        new_shape_resample_grid = ones + list(resample_grid.shape)
        resample_grid = resample_grid.reshape(new_shape_resample_grid)

        # Calculate final flux
        out_flux = np.sum(in_flux * resample_grid, axis=-1) / np.sum(
            resample_grid, axis=-1)

        # Calculate output uncertainty
        if pixel_uncer is not None:
            pixel_uncer = pixel_uncer.reshape(new_flux_shape)

            out_variance = np.sum(pixel_uncer * resample_grid**2,
                                  axis=-1) / np.sum(resample_grid**2, axis=-1)
            out_uncertainty = InverseVariance(np.reciprocal(out_variance))
        else:
            out_uncertainty = None

        # nan-filling happens by default - replace with zeros if requested:
        if self.extrapolation_treatment == 'zero_fill':
            origedges = orig_spectrum.spectral_axis.bin_edges
            off_edges = (fin_spec_axis < origedges[0]) | (origedges[-1] <
                                                          fin_spec_axis)
            out_flux[off_edges] = 0
            if out_uncertainty is not None:
                out_uncertainty.array[off_edges] = 0

        # todo: for now, use the units from the pre-resampled
        # spectra, although if a unit is defined for fin_spec_axis and it doesn't
        # match the input spectrum it won't work right, will have to think
        # more about how to handle that... could convert before and after
        # calculation, which is probably easiest. Matrix math algorithm is
        # geometry based, so won't work to just let quantity math handle it.
        resampled_spectrum = Spectrum1D(flux=out_flux,
                                        spectral_axis=np.array(fin_spec_axis) *
                                        orig_spectrum.spectral_axis.unit,
                                        uncertainty=out_uncertainty)

        return resampled_spectrum
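
A brief usage sketch for the resampler whose resample1d method is shown above; the grid sizes and values here are illustrative only:

import numpy as np
import astropy.units as u
from astropy.nddata import InverseVariance
from specutils import Spectrum1D
from specutils.manipulation import FluxConservingResampler

spec = Spectrum1D(spectral_axis=np.linspace(4000, 5000, 200) * u.AA,
                  flux=(10 + np.random.randn(200)) * u.Jy,
                  uncertainty=InverseVariance(np.full(200, 25.0)))

new_axis = np.linspace(4000, 5000, 73) * u.AA

# Any supported input uncertainty is propagated and returned as InverseVariance
resampler = FluxConservingResampler()
resampled = resampler(spec, new_axis)
print(resampled.uncertainty)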
Code example #16
File: specutils.py  Project: desihub/prospect
    def __init__(self, bands=[], wave={}, flux={}, ivar={}, mask=None, resolution_data=None,
        fibermap=None, meta=None, extra=None, single=False, scores=None):

        self._bands = tuple(bands)
        self._single = single
        self._ftype = np.float64
        if single:
            self._ftype = np.float32

        self._reset_properties()

        self.scores = scores

        if meta is None:
            self.meta = dict()
        elif isinstance(meta, fits.Header):
            self.meta = {'header': meta}
        else:
            self.meta = meta.copy()

        nspec = None

        # check consistency of input dimensions
        for b in self._bands:
            if wave[b].ndim != 1:
                raise ValueError("Wavelength array for band {} should have shape (Nwave, ).".format(b))
            if flux[b].ndim != 2:
                raise ValueError("Flux array for band {} should have shape (Nspec, Nwave).".format(b))
            if flux[b].shape[1] != wave[b].shape[0]:
                raise ValueError("Flux array wavelength dimension for band {} does not match wavelength grid.".format(b))
            if nspec is None:
                nspec = flux[b].shape[0]
            if fibermap is not None:
                if len(fibermap) != flux[b].shape[0]:
                    raise ValueError("Flux array number of spectra for band {} does not match fibermap.".format(b))
            if ivar[b].shape != flux[b].shape:
                raise ValueError("Inverse variance array dimensions do not match flux for band {}.".format(b))
            if mask is not None:
                if mask[b].shape != flux[b].shape:
                    raise ValueError("Mask array dimensions do not match flux for band {}.".format(b))
                if mask[b].dtype not in (int, np.int64, np.int32, np.uint64, np.uint32):
                    raise ValueError("Bad mask type {}.".format(mask.dtype))
            if resolution_data is not None:
                if resolution_data[b].ndim != 3:
                    raise ValueError("Resolution array for band {} should have shape (Nspec, Ndiag, Nwave).".format(b))
                if resolution_data[b].shape[0] != flux[b].shape[0]:
                    raise ValueError("Resolution array spectrum dimension for band {} does not match flux.".format(b))
                if resolution_data[b].shape[2] != wave[b].shape[0]:
                    raise ValueError("Resolution array wavelength dimension for band {} does not match wavelength grid.".format(b))
            if extra is not None:
                for ex in extra[b].items():
                    if ex[1].shape != flux[b].shape:
                        raise ValueError("Extra arrays must have the same shape as the flux array.")

        if fibermap is not None:
            self.fibermap = fibermap.copy()
        else:
            self.fibermap = None

        # copy band-based data

        for b in self._bands:
            band_meta = dict()
            if mask is None:
                band_meta['mask'] = None
                bool_mask = None
            else:
                band_meta['mask'] = np.copy(mask[b])
                bool_mask = band_meta['mask'] != 0
            if resolution_data is None:
                band_meta['resolution_data'] = None
                band_meta['R'] = None
            else:
                band_meta['resolution_data'] = np.copy(resolution_data[b].astype(self._ftype))
                band_meta['R'] = np.array([Resolution(r) for r in resolution_data[b]])
            if extra is None:
                band_meta['extra'] = None
            else:
                band_meta['extra'] = dict()
                for k, v in extra[b].items():
                    band_meta['extra'][k] = np.copy(v.astype(self._ftype))
            self.append(Spectrum1D(spectral_axis=np.copy(wave[b].astype(self._ftype))*u.Angstrom,
                                   flux=np.copy(flux[b].astype(self._ftype))*u.Unit('10**-17 erg/(s cm2 Angstrom)'),
                                   uncertainty=InverseVariance(np.copy(ivar[b].astype(self._ftype))),
                                   mask=bool_mask,
                                   meta=band_meta))
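
A hedged construction sketch for the Spectra class whose __init__ is shown above. The class itself, which appends one Spectrum1D per band (suggesting a SpectrumList-style container), lives in desihub/prospect's specutils.py and is assumed to be importable; the band name and array sizes are made up:

import numpy as np
# from prospect.specutils import Spectra   # assumed import path

nspec, nwave = 5, 100
wave = {"b": np.linspace(3600.0, 5800.0, nwave)}
flux = {"b": np.ones((nspec, nwave))}
ivar = {"b": np.full((nspec, nwave), 4.0)}

# One Spectrum1D per band is appended, with ivar wrapped as InverseVariance
spectra = Spectra(bands=["b"], wave=wave, flux=flux, ivar=ivar)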
Code example #17
File: measure_NUV.py  Project: fschmnn/cluster
    # NUV image
    filename = data_ext / 'HST' / 'filterImages' / f'hlsp_phangs-hst_hst_wfc3-uvis_{gal_name.lower()}_f275w_v1_exp-drc-sci.fits'
    error_file = data_ext / 'HST' / 'filterImages' / f'hlsp_phangs-hst_hst_wfc3-uvis_{gal_name.lower()}_f275w_v1_err-drc-wht.fits'

    if not filename.is_file():
        print(f'no NUV data for {gal_name}')
        continue
    else:
        with fits.open(filename) as hdul:
            F275 = NDData(hdul[0].data,
                          mask=hdul[0].data == 0,
                          meta=hdul[0].header,
                          wcs=WCS(hdul[0].header))
            with fits.open(error_file) as hdul:
                F275.uncertainty = InverseVariance(hdul[0].data)
    print(f'read in HST data')

    muse_regions = Regions(mask=nebulae_mask.data,
                           projection=nebulae_mask.meta,
                           bkg=-1)
    hst_regions = muse_regions.reproject(F275.meta)
    print('regions reprojected')

    muse_reproj, footprint = reproject_interp(
        (nebulae_mask.mask, nebulae_mask.wcs), F275.meta)
    mean, median, std = sigma_clipped_stats(F275.data[footprint.astype(bool)])
    print('measuring sigma_clipped_stats')

    tmp = nebulae[nebulae['gal_name'] == gal_name]
Code example #18
File: jwst_reader.py  Project: broulston/specutils
def _jwst_s3d_loader(filename, **kwargs):
    """
    Loader for JWST s3d 3D rectified spectral data in FITS format.

    Parameters
    ----------
    filename : str
        The path to the FITS file

    Returns
    -------
    SpectrumList
        The spectra contained in the file.
    """
    spectra = []

    # Get a list of GWCS objects from the slits
    with asdf.open(filename) as af:
        wcslist = [af.tree["meta"]["wcs"]]

    with fits.open(filename, memmap=False) as hdulist:

        primary_header = hdulist["PRIMARY"].header

        hdulist_sci = [hdu for hdu in hdulist if hdu.name == "SCI"]

        for hdu, wcs in zip(hdulist_sci, wcslist):
            # Get flux
            try:
                flux_unit = u.Unit(hdu.header["BUNIT"])
            except (ValueError, KeyError):
                flux_unit = None

            # The spectral axis is first.  We need it last
            flux_array = hdu.data.T
            flux = Quantity(flux_array, unit=flux_unit)

            # Get the wavelength array from the GWCS object which returns a
            # tuple of (RA, Dec, lambda).
            # Since the spatial and spectral axes are orthogonal in s3d data,
            # it is much faster to compute a slice down the spectral axis.
            grid = grid_from_bounding_box(wcs.bounding_box)[:, :, 0, 0]
            _, _, wavelength_array = wcs(*grid)
            _, _, wavelength_unit = wcs.output_frame.unit

            wavelength = Quantity(wavelength_array, unit=wavelength_unit)

            # Merge primary and slit headers and dump into meta
            slit_header = hdu.header
            header = primary_header.copy()
            header.extend(slit_header, strip=True, update=True)
            meta = {'header': header}

            # get uncertainty information
            ext_name = primary_header.get("ERREXT", "ERR")
            err_type = hdulist[ext_name].header.get("ERRTYPE", 'ERR')
            err_unit = hdulist[ext_name].header.get("BUNIT", None)
            err_array = hdulist[ext_name].data.T

            # ERRTYPE can be one of "ERR", "IERR", "VAR", "IVAR"
            # but mostly ERR for JWST cubes
            # see https://jwst-pipeline.readthedocs.io/en/latest/jwst/data_products/science_products.html#s3d
            if err_type == "ERR":
                err = StdDevUncertainty(err_array, unit=err_unit)
            elif err_type == 'VAR':
                err = VarianceUncertainty(err_array, unit=err_unit)
            elif err_type == 'IVAR':
                err = InverseVariance(err_array, unit=err_unit)
            elif err_type == 'IERR':
                warnings.warn(
                    "Inverse error is not yet a supported astropy.nddata "
                    "uncertainty. Setting err to None.")
                err = None

            # get mask information
            mask_name = primary_header.get("MASKEXT", "DQ")
            mask = hdulist[mask_name].data.T

            spec = Spectrum1D(flux=flux,
                              spectral_axis=wavelength,
                              meta=meta,
                              uncertainty=err,
                              mask=mask)
            spectra.append(spec)

    return SpectrumList(spectra)
Code example #19
File: specutils.py  Project: desihub/prospect
    def update(self, other):
        """
        Overwrite or append new data.

        Given another Spectra object, compare the fibermap information with
        the existing one.  For spectra that already exist, overwrite existing
        data with the new values.  For spectra that do not exist, append that
        data to the end of the spectral data.

        Args:
            other (Spectra): the new data to add.

        Returns:
            nothing (object updated in place).

        """
        if not isinstance(other, Spectra):
            raise ValueError("New data has incorrect type!")

        # Does the other Spectra object have any data?

        if other.num_spectra() == 0:
            return

        # Do we have new bands to add?

        newbands = []
        for b in other.bands:
            if b not in self.bands:
                newbands.append(b)
            else:
                if not np.allclose(self.wave[b], other.wave[b]):
                    raise ValueError("Band {} has an incompatible wavelength grid.".format(b))

        bands = list(self.bands)
        bands.extend(newbands)

        # Are we adding mask data in this update?

        add_mask = False
        if other.mask is None:
            if self.mask is not None:
                raise ValueError("Existing spectra has a mask, cannot "
                                 "update it to a spectra with no mask.")
        else:
            if self.mask is None:
                add_mask = True

        # Are we adding resolution data in this update?

        ndiag = {}

        add_res = False
        if other.resolution_data is None:
            if self.resolution_data is not None:
                raise ValueError("Existing spectra has resolution data, cannot "
                                 "update it to a spectra with none.")
        else:
            if self.resolution_data is not None:
                for b in self.bands:
                    ndiag[b] = self.resolution_data[b].shape[1]
                for b in other.bands:
                    odiag = other.resolution_data[b].shape[1]
                    if b not in self.bands:
                        ndiag[b] = odiag
                    else:
                        if odiag != ndiag[b]:
                            raise ValueError("Resolution matrices for a"
                                             " given band must have the same dimensions.")
            else:
                add_res = True
                for b in other.bands:
                    ndiag[b] = other.resolution_data[b].shape[1]

        # Are we adding extra data in this update?

        add_extra = False
        if other.extra is None:
            if self.extra is not None:
                raise ValueError("Existing spectra has extra data, cannot "
                                 "update it to a spectra with none.")
        else:
            if self.extra is None:
                add_extra = True

        # Compute which targets / exposures are new

        nother = len(other.fibermap)
        exists = np.zeros(nother, dtype=int)

        indx_original = []

        if self.fibermap is not None:
            for r in range(nother):
                expid = other.fibermap[r]["EXPID"]
                fiber = other.fibermap[r]["FIBER"]
                for i, row in enumerate(self.fibermap):
                    if (expid == row["EXPID"]) and (fiber == row["FIBER"]):
                        indx_original.append(i)
                        exists[r] += 1

        if len(np.where(exists > 1)[0]) > 0:
            raise ValueError("Found duplicate spectra (same EXPID and FIBER) in the fibermap.")

        indx_exists = np.where(exists == 1)[0]
        indx_new = np.where(exists == 0)[0]

        # Make new data arrays of the correct size to hold both the old and
        # new data

        nupdate = len(indx_exists)
        nnew = len(indx_new)

        if self.fibermap is None:
            nold = 0
            newfmap = other.fibermap.copy()
        else:
            nold = len(self.fibermap)
            newfmap = encode_table(np.zeros( (nold + nnew, ),
                                   dtype=self.fibermap.dtype))

        if self.scores is None:
            if other.scores is None:
                newscores = None
            else:
                newscores = other.scores.copy()
        else:
            newscores = encode_table(np.zeros( (nold + nnew, ),
                                     dtype=self.scores.dtype))

        newwave = {}
        newflux = {}
        newivar = {}

        newmask = None
        if add_mask or self.mask is not None:
            newmask = {}

        newres = None
        newR = None
        if add_res or self.resolution_data is not None:
            newres = {}
            newR = {}

        newextra = None
        if add_extra or self.extra is not None:
            newextra = {}

        for b in bands:
            nwave = None
            if b in self.bands:
                nwave = self.wave[b].shape[0]
                newwave[b] = self.wave[b]
            else:
                nwave = other.wave[b].shape[0]
                newwave[b] = other.wave[b].astype(self._ftype)
            newflux[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            newivar[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            if newmask is not None:
                newmask[b] = np.zeros( (nold + nnew, nwave), dtype=np.uint32)
                newmask[b][:,:] = specmask["NODATA"]
            if newres is not None:
                newres[b] = np.zeros( (nold + nnew, ndiag[b], nwave), dtype=self._ftype)
            if newextra is not None:
                newextra[b] = {}

        # Copy the old data

        if nold > 0:
            # We have some data (i.e. we are not starting with an empty Spectra)
            newfmap[:nold] = self.fibermap
            if newscores is not None:
                newscores[:nold] = self.scores

            for b in self.bands:
                newflux[b][:nold,:] = self.flux[b]
                newivar[b][:nold,:] = self.ivar[b]
                if self.mask is not None:
                    newmask[b][:nold,:] = self.mask[b]
                elif add_mask:
                    newmask[b][:nold,:] = 0
                if self.resolution_data is not None:
                    newres[b][:nold,:,:] = self.resolution_data[b]
                if self.extra is not None:
                    for ex in self.extra[b].items():
                        newextra[b][ex[0]] = np.zeros( newflux[b].shape,
                            dtype=self._ftype)
                        newextra[b][ex[0]][:nold,:] = ex[1]

        # Update existing spectra

        for i, s in enumerate(indx_exists):
            row = indx_original[i]
            for b in other.bands:
                newflux[b][row,:] = other.flux[b][s,:].astype(self._ftype)
                newivar[b][row,:] = other.ivar[b][s,:].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][row,:] = other.mask[b][s,:]
                else:
                    newmask[b][row,:] = 0
                if other.resolution_data is not None:
                    newres[b][row,:,:] = other.resolution_data[b][s,:,:].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][row,:] = ex[1][s,:].astype(self._ftype)

        # Append new spectra

        if nnew > 0:
            newfmap[nold:] = other.fibermap[indx_new]
            if newscores is not None:
                newscores[nold:] = other.scores[indx_new]

            for b in other.bands:
                newflux[b][nold:,:] = other.flux[b][indx_new].astype(self._ftype)
                newivar[b][nold:,:] = other.ivar[b][indx_new].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][nold:,:] = other.mask[b][indx_new]
                else:
                    newmask[b][nold:,:] = 0
                if other.resolution_data is not None:
                    newres[b][nold:,:,:] = other.resolution_data[b][indx_new].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][nold:,:] = ex[1][indx_new].astype(self._ftype)

        # Swap data into place

        self._bands = bands
        self.fibermap = newfmap
        self.scores = newscores
        self._reset_properties()
        for i, b in enumerate(self._bands):
            band_meta = dict()
            if newmask is None:
                band_meta['mask'] = None
                bool_mask = None
            else:
                band_meta['mask'] = newmask[b]
                bool_mask = band_meta['mask'] != 0
            if newres is None:
                band_meta['resolution_data'] = None
                band_meta['R'] = None
            else:
                band_meta['resolution_data'] = newres[b]
                band_meta['R'] = np.array([Resolution(r) for r in newres[b]])
            if newextra is None:
                band_meta['extra'] = None
            else:
                band_meta['extra'] = dict()
                for k, v in newextra[b].items():
                    band_meta['extra'][k] = v
            s = Spectrum1D(spectral_axis=newwave[b]*u.Angstrom,
                           flux=newflux[b]*u.Unit('10**-17 erg/(s cm2 Angstrom)'),
                           uncertainty=InverseVariance(newivar[b]),
                           mask=bool_mask,
                           meta=band_meta)
            try:
                self[i] = s
            except IndexError:
                self.append(s)
        return
Code example #20
def chunk_redshift(data_wave, data_flux, data_noise, template_path, z_lit, targ_delta, overhang, z_test, z_bound, position):
    """Returns the bestfit redshift of each chunk.
    
    Parameters
    ----------
    data_wave : tuple
        Data wavelength array
    data_flux : tuple
        Data flux array
    data_noise : tuple
        Data noise array
    template_path : str
        Path to template spectrum file
    z_lit : float
        Literature redshift of target object
    targ_delta : float
        Wavelength chunk size in Angstroms
    overhang : float
        Amount of wavelength overhang template chunks should have in Angstroms
    z_test : float
        Starting redshift for chunks (measured by eye in 1 chunk)
    z_bound : float
        Amount to add and subtract from z_test for redshifts to test
    position : str
        'before' or 'after' to indicate if pre- or post-flexure correction
        
    Returns
    -------
    bestfit_redshift : tuple
        Best fitting redshift for each chunk
    best_chi2 : tuple
        Minimum chi squared for each chunk
    redshifted_spectra : tuple
        Redshifted chunks
    chi2 : tuple
        All chi2
    """
    
    #Get data chunks
    data_wave_chunks, data_flux_chunks, data_noise_chunks = data_chunks(data_wave, data_flux, data_noise, targ_delta)

    #Get template chunks
    temp_wave_chunks, temp_flux_chunks, temp_noise_chunks, temp_central_wavelengths, central_waves = \
        template_chunks(data_wave, data_flux, data_noise, template_path,
                        z_lit, targ_delta, overhang, position)

    #Find redshifts of each chunk
    observed_chunks = []
    temp_chunks = []
    for i in range(len(data_wave_chunks)):
        observed_chunks.append(Spectrum1D(spectral_axis=data_wave_chunks[i]*u.Angstrom, 
                                          flux=data_flux_chunks[i]*(u.erg/u.s/u.cm**2/u.Angstrom), 
                                          uncertainty=InverseVariance(data_noise_chunks[i])))
        temp_chunks.append(Spectrum1D(spectral_axis=temp_wave_chunks[i]*u.Angstrom, 
                                 flux=temp_flux_chunks[i]*(u.Lsun/u.micron),
                                 uncertainty=StdDevUncertainty(temp_noise_chunks[i])))

    redshifts_chunks = np.linspace(z_test-z_bound, z_test+z_bound, 1000)
    fitted_redshift_results = []
    bestfit_redshift = np.zeros(len(data_wave_chunks))
    best_chi2 = np.zeros(len(data_wave_chunks))
    redshifted_spectra = []
    chi2 = []
    for i in range(len(data_wave_chunks)):
        fitted_redshift_results.append(template_redshift(observed_spectrum=observed_chunks[i], 
                                                            template_spectrum=temp_chunks[i],
                                                            redshift=redshifts_chunks))
        bestfit_redshift[i] = fitted_redshift_results[i][0]
        best_chi2[i] = fitted_redshift_results[i][1]
        redshifted_spectra.append(fitted_redshift_results[i][2])
        chi2.append(fitted_redshift_results[i][3])
        
    return bestfit_redshift, best_chi2, redshifted_spectra, chi2