Example #1
def read_cube(filename, **kwargs):
    cube_data = None
    exclude_exts = []
    data_collection = []
    hdulist = fits.open(filename)
    try:
        cube_data = CubeData.read(hdulist)
    except CubeDataIOError as e:
        warnings.warn('No CubeData found in "{}": {}'.format(
            filename,
            str(e)
        ))

    if cube_data is not None:
        data = Data()
        try:
            data.coords = coordinates_from_wcs(cube_data.wcs)
        except AttributeError:
            # There is no wcs. Not to worry now.
            pass
        data.add_component(Component(cube_data), label="cube")
        data_collection.append(data)
        exclude_exts = cube_data.meta.get('hdu_ids')

    # Read in the rest of the FITS file.
    data_collection += _load_fits_generic(hdulist,
                                          exclude_exts=exclude_exts)
    return data_collection
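
A hedged usage sketch for the loader above (the file name is hypothetical; CubeData, CubeDataIOError and _load_fits_generic are assumed to come from the surrounding cube-tools module):

data_collection = read_cube('manga_cube.fits')
for d in data_collection:
    print(d.label, d.components)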
Example #2
    def setup_data(self):
        from glue.core.coordinates import coordinates_from_wcs
        from astropy.wcs import WCS
        wcs = WCS(naxis=3)

        self.data = Data(x=np.zeros((3, 3, 3)))
        self.data.coords = coordinates_from_wcs(wcs)
Example #3
File: glue_yt.py  Project: jzuhone/glue-yt
    def __init__(self, ds):
        super(YTGlueData, self).__init__()
        self.ds = ds
        self.grid = ds.arbitrary_grid(ds.domain_left_edge,
                                      ds.domain_right_edge, (256, ) * 3)
        self.region = ds.all_data()
        self.cids = [
            ComponentID('{} {}'.format(*f.name), parent=self)
            for f in ds.fields.gas
        ]
        w = astropy.wcs.WCS(naxis=3)
        c = 0.5 * (self.grid.left_edge + self.grid.right_edge)
        c = c.in_units('kpc')
        w.wcs.cunit = [str(c.units)] * 3
        w.wcs.crpix = 0.5 * (np.array(self.grid.shape) + 1)
        w.wcs.cdelt = self.grid.dds.in_units('kpc').d
        w.wcs.crval = c.d
        self.coords = coordinates_from_wcs(w)
        wcids = []
        for i in range(self.ndim):
            label = self.coords.axis_label(i)
            wcids.append(ComponentID(label, parent=self))
        self._world_component_ids = wcids
        self._dds = (ds.domain_width / self.shape).to_value("code_length")
        self._left_edge = self.ds.domain_left_edge.to_value("code_length")
        self._right_edge = self.ds.domain_right_edge.to_value("code_length")
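
The WCS construction above depends on a yt dataset; below is a minimal standalone sketch of the same pattern with made-up grid values (the shape, cell size and centre are illustrative assumptions):

import numpy as np
import astropy.wcs
from glue.core.coordinates import coordinates_from_wcs

shape = (256, 256, 256)              # hypothetical grid shape
cdelt = np.array([0.5, 0.5, 0.5])    # hypothetical cell size in kpc
center = np.array([0.0, 0.0, 0.0])   # hypothetical grid centre in kpc

w = astropy.wcs.WCS(naxis=3)
w.wcs.cunit = ['kpc'] * 3
w.wcs.crpix = 0.5 * (np.array(shape) + 1)   # reference pixel at the grid centre
w.wcs.cdelt = cdelt
w.wcs.crval = center
coords = coordinates_from_wcs(w)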
Example #4
def deimos_spectrum1D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 1D spectra.

    This loads the 'Bxspf-B' (extension 1)
    and 'Bxspf-R' (extension 2) and appends them
    together to produce the combined Red/Blue Spectrum
    along with their Wavelength and Inverse Variance
    arrays.
    """
    with fits.open(file_name) as hdulist:
        data = Data(label='1D Spectrum')
        hdulist[1].header['CTYPE1'] = 'WAVE'
        hdulist[1].header['CUNIT1'] = 'Angstrom'
        data.header = hdulist[1].header
        wcs = WCS(hdulist[1].header)
        data.coords = coordinates_from_wcs(wcs)

        full_wl = np.append(hdulist[1].data['LAMBDA'][0],
                            hdulist[2].data['LAMBDA'][0])
        full_spec = np.append(hdulist[1].data['SPEC'][0],
                              hdulist[2].data['SPEC'][0])
        full_ivar = np.append(hdulist[1].data['IVAR'][0],
                              hdulist[2].data['IVAR'][0])

        data.add_component(full_wl, 'Wavelength')
        data.add_component(full_spec, 'Flux')
        data.add_component(1 / np.sqrt(full_ivar), 'Uncertainty')

    return data
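
A hedged sketch of how a loader like this is typically registered with glue's data-factory registry (the label and wrapper name are illustrative; deimos_spectrum1D_reader is the function defined above):

from glue.config import data_factory

@data_factory('DEIMOS 1D Spectrum')
def read_deimos_1d(file_name):
    return deimos_spectrum1D_reader(file_name)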
Example #5
def load_wcs_data(data_collection):

    data_equ = Data(coords=coordinates_from_wcs(EQUATORIAL_WCS))
    data_equ['primary'] = np.random.random((512, 512))
    data_collection.append(data_equ)

    data_gal = Data(coords=coordinates_from_wcs(GALACTIC_WCS))
    data_gal['primary'] = np.random.random((512, 512))
    data_collection.append(data_gal)

    wy1, wx1 = data_gal.world_component_ids
    wy2, wx2 = data_equ.world_component_ids

    link = Galactic_to_FK5(wx1, wy1, wx2, wy2)

    data_collection.add_link(link)
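
EQUATORIAL_WCS and GALACTIC_WCS above are assumed to be pre-built 2-d celestial WCS objects, and Galactic_to_FK5 is assumed to be imported from glue's coordinate link helpers. A minimal sketch of what the two WCS globals might look like, with purely illustrative reference values:

from astropy.wcs import WCS

EQUATORIAL_WCS = WCS(naxis=2)
EQUATORIAL_WCS.wcs.ctype = ['RA---TAN', 'DEC--TAN']
EQUATORIAL_WCS.wcs.crval = [10.68, 41.27]    # illustrative sky position (deg)
EQUATORIAL_WCS.wcs.crpix = [256.5, 256.5]
EQUATORIAL_WCS.wcs.cdelt = [-0.001, 0.001]

GALACTIC_WCS = WCS(naxis=2)
GALACTIC_WCS.wcs.ctype = ['GLON-TAN', 'GLAT-TAN']
GALACTIC_WCS.wcs.crval = [121.17, -21.57]    # roughly the same position in galactic coordinates
GALACTIC_WCS.wcs.crpix = [256.5, 256.5]
GALACTIC_WCS.wcs.cdelt = [-0.001, 0.001]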
Example #6
    def _get_sci_group(i, index):
        d = Data("%s_%i" % (label, index))
        d.coords = coordinates_from_wcs(HSTWCS(hdulist, i))

        index = index + 1
        d.add_component(hdulist[i].data, hdulist[i].name)
        for h in hdulist[i + 1:]:  # scan HDUs after this SCI extension
            if h.name == 'SCI':
                break  # new science grp
            if h.name not in ['ERR', 'DQ']:
                continue
            d.add_component(h.data, h.name)
        return d
Example #7
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component)._data
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
Example #8
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component).unmasked_data[...]
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
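
A hedged usage sketch with the spectral-cube package (the file name and label are hypothetical):

from spectral_cube import SpectralCube

cube = SpectralCube.read('ngc1333_13co.fits')
data = spectral_cube_to_data(cube, label='13CO cube')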
Example #9
def tfw_to_coords(filename, shp):
    """ Use a TIFF world file to build Glue Coordinates """
    with open(filename) as hfile:
        hdr = hfile.read().splitlines()
    hdr = [float(v) for v in hdr]  # a list, so it can be indexed below
    hdr = dict(CD1_1=hdr[0],
               CD1_2=hdr[1] * (-1),
               CD2_1=hdr[2],
               CD2_2=hdr[3] * (-1),
               CRVAL1=hdr[4],
               CRVAL2=hdr[5],
               CRPIX1=0,
               CRPIX2=shp[0])
    wcs = WCS(hdr)
    return coordinates_from_wcs(wcs)
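
A hedged usage sketch pairing a TIFF world file with the pixel shape of its image (file names are hypothetical; Pillow is assumed for reading the TIFF):

import numpy as np
from PIL import Image

img = np.asarray(Image.open('ortho.tif'))
coords = tfw_to_coords('ortho.tfw', img.shape)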
Example #10
def deimos_spectrum2D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 2D spectra.

    This loads only the Flux and Inverse variance.
    Wavelength information comes from the WCS.
    """

    hdulist = fits.open(file_name)
    data = Data(label='2D Spectrum')
    hdulist[1].header['CTYPE2'] = 'Spatial Y'
    wcs = WCS(hdulist[1].header)
    # original WCS has both axes named "LAMBDA", glue requires unique component names

    data.coords = coordinates_from_wcs(wcs)
    data.header = hdulist[1].header
    data.add_component(hdulist[1].data['FLUX'][0], 'Flux')
    data.add_component(1 / np.sqrt(hdulist[1].data['IVAR'][0]), 'Uncertainty')
    return data
Example #11
File: mos_loaders.py  Project: pllim/mosviz
def deimos_spectrum2D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 2D spectra.

    This loads only the Flux and Inverse variance.
    Wavelength information comes from the WCS.
    """

    hdulist = fits.open(file_name)
    data = Data(label='2D Spectrum')
    hdulist[1].header['CTYPE2'] = 'Spatial Y'
    wcs = WCS(hdulist[1].header)
    # original WCS has both axes named "LAMBDA", glue requires unique component names

    data.coords = coordinates_from_wcs(wcs)
    data.header = hdulist[1].header
    data.add_component(hdulist[1].data['FLUX'][0], 'Flux')
    data.add_component(hdulist[1].data['IVAR'][0], 'Uncertainty')
    return data
Example #12
def herschel_data(filename):
    """
    Data loader customized for Herschel fits files

    This function extracts the extensions named 'image',
    'error', 'coverage', etc. from a file and adds each
    as a component of a single glue Data object.
    To handle PACS cubes, the 'ImageIndex' extension,
    if present, is used to provide wavelengths.

    astropy.wcs.WCS objects are used to parse the WCS.

    Any other extensions are ignored.
    """

    hdulist = fits.open(filename, memmap=True, ignore_missing_end=True)

    d = Data("data")
    # Fix for invalid CUNIT values in 12.1 PACS data
    for c in ['CUNIT1', 'CUNIT2']:
        if (c in hdulist['image'].header.keys()):
            hdulist['image'].header[c] = 'deg'
    if ('CUNIT3' in hdulist['image'].header.keys()):
        hdulist['image'].header['CUNIT3'] = 'um'
    d.coords = coordinates_from_wcs(WCS(hdulist['image'].header))
    wavelengths = None
    for h in hdulist:
        if (h.name in ['image', 'error', 'coverage']):
            d.add_component(hdulist[h.name].data, h.name)
        if (h.name == 'ImageIndex'):
            wavelengths = hdulist[h.name].data.field(0)

    # Fix up wavelengths if needed
    if (wavelengths is not None
            and d['Wavelength'].shape[0] == len(wavelengths)):
        warray = np.zeros(d['Wavelength'].shape, dtype=d['Wavelength'].dtype)
        warray += wavelengths[:, np.newaxis, np.newaxis]
        d.remove_component('Wavelength')
        d.add_component(warray, label='Wavelength')

    return d
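
A hedged usage sketch (the file name is hypothetical):

d = herschel_data('pacs_level2_map.fits')
print(d.components)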
Example #13
def pre_nircam_image_reader(file_name):
    """
    Data loader for simulated NIRCam image. This is for the
    full image, where cut-outs will be created on the fly.

    From the header:
    If ISWFS is T, structure is:

            -  Plane 1: Signal [frame3 - frame1] in ADU
            -  Plane 2: Signal uncertainty [sqrt(2*RN/g + |frame3|)]

    If ISWFS is F, structure is:

            -  Plane 1: Signal from linear fit to ramp [ADU/sec]
            -  Plane 2: Signal uncertainty [ADU/sec]

    Note that in the latter case, the uncertainty is simply the formal
    uncertainty in the fit parameter (e.g. uncorrelated, WRONG!). A noise
    model is to be implemented at a later date.
    In the case of WFS, the error is computed as SQRT(2*sigma_read + |frame3|),
    which should be a bit more correct - ~Fowler sampling.

    The FITS file has a single extension with a data cube.
    The data is the first slice of the cube and the uncertainty
    is the second slice.

    """

    hdulist = fits.open(file_name)
    data = Data(label='NIRCam Image')
    data.header = hdulist[0].header
    wcs = WCS(hdulist[0].header)

    # drop the last axis since the cube will be split
    data.coords = coordinates_from_wcs(wcs)
    data.add_component(hdulist[0].data, 'Flux')
    data.add_component(hdulist[0].data / 100, 'Uncertainty')

    hdulist.close()

    return data
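
The comment above mentions dropping the last axis; as a hedged aside, if only the two celestial axes should feed the coordinates, astropy's WCS can be reduced before it is handed over, e.g.:

data.coords = coordinates_from_wcs(wcs.celestial)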
Example #14
    def __init__(self, ds_all, units=None):
        super(YTGlueData, self).__init__()
        self.ds_all = ds_all
        self.ds = ds_all[0]
        self._current_step = 0
        if units is None:
            self.units = self.ds.get_smallest_appropriate_unit(
                self.ds.domain_width[0])
        else:
            self.units = units
        self.region = self.ds.all_data()
        self.cids = [
            ComponentID('"{}","{}"'.format(*f.name), parent=self)
            for f in self.ds.fields.gas
        ]
        self._dds = (self.ds.domain_width / self.shape).d
        self._left_edge = self.ds.domain_left_edge.d
        self._right_edge = self.ds.domain_right_edge.d
        w = astropy.wcs.WCS(naxis=3)
        c = 0.5 * (self.ds.domain_left_edge + self.ds.domain_right_edge)
        w.wcs.cunit = [self.units] * 3
        w.wcs.crpix = 0.5 * (np.array(self.shape) + 1)
        w.wcs.cdelt = self.ds.arr(self._dds,
                                  "code_length").to_value(self.units)
        w.wcs.crval = c.to_value(self.units)
        self.wcs = w
        self.coords = coordinates_from_wcs(w)
        wcids = []
        for i in range(self.ndim):
            label = self.coords.axis_label(i)
            wcids.append(ComponentID(label, parent=self))
        self._world_component_ids = wcids
        for i, ax in enumerate("xyz"):

            # Bind ax and i as defaults so each derived field keeps its own
            # axis rather than the loop's final values.
            def _pixel_c(field, data, ax=ax, i=i):
                return self._get_pix(data["index", ax].d, ax=i)

            self.ds.add_field(("index", "pixel_{}".format(ax)),
                              _pixel_c,
                              units="")
Example #15
File: image.py  Project: PennyQ/glue
def img_data(file_name):
    """Load common image files into a Glue data object"""
    result = Data()

    data = img_loader(file_name)
    data = np.flipud(data)
    shp = data.shape

    comps = []
    labels = []

    # split 3 color images into each color plane
    if len(shp) == 3 and shp[2] in [3, 4]:
        comps.extend([data[:, :, 0], data[:, :, 1], data[:, :, 2]])
        labels.extend(['red', 'green', 'blue'])
        if shp[2] == 4:
            comps.append(data[:, :, 3])
            labels.append('alpha')
    else:
        comps = [data]
        labels = ['PRIMARY']

    # look for AVM coordinate metadata
    try:
        from pyavm import AVM
        avm = AVM(str(file_name))  # avoid unicode
        wcs = avm.to_wcs()
    except Exception:
        pass
    else:
        result.coords = coordinates_from_wcs(wcs)

    for c, l in zip(comps, labels):
        result.add_component(c, l)

    return result
Example #16
File: image.py  Project: dhomeier/glue
def img_data(file_name):
    """Load common image files into a Glue data object"""
    result = Data()

    data = img_loader(file_name)
    data = np.flipud(data)
    shp = data.shape

    comps = []
    labels = []

    # split 3 color images into each color plane
    if len(shp) == 3 and shp[2] in [3, 4]:
        comps.extend([data[:, :, 0], data[:, :, 1], data[:, :, 2]])
        labels.extend(['red', 'green', 'blue'])
        if shp[2] == 4:
            comps.append(data[:, :, 3])
            labels.append('alpha')
    else:
        comps = [data]
        labels = ['PRIMARY']

    # look for AVM coordinate metadata
    try:
        from pyavm import AVM
        avm = AVM.from_image(str(file_name))  # avoid unicode
        wcs = avm.to_wcs()
    except Exception:
        pass
    else:
        result.coords = coordinates_from_wcs(wcs)

    for c, l in zip(comps, labels):
        result.add_component(c, l)

    return result
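
A hedged usage sketch (the file name is hypothetical; pyavm is optional and only consulted for AVM coordinate metadata):

data = img_data('m101_mosaic.jpg')
print(data.components)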