Example #1
    def write(self, data):
        """
        Write the given data to the stream.

        Parameters
        ----------
        data : ndarray
            Data to stream to the file.

        Returns
        -------
        writecomplete : int
            Flag that when `True` indicates that all of the required
            data has been written to the stream.

        Notes
        -----
        Only the amount of data specified in the header provided to the class
        constructor may be written to the stream.  If the provided data would
        cause the stream to overflow, an `OSError` exception is
        raised and the data is not written. Once sufficient data has been
        written to the stream to satisfy the amount specified in the header,
        the stream is padded to fill a complete FITS block and no more data
        will be accepted. An attempt to write more data after the stream has
        been filled will raise an `OSError` exception. If the
        dtype of the input data does not match what is expected by the header,
        a `TypeError` exception is raised.
        """

        size = self._ffo.tell() - self._data_offset

        if self.writecomplete or size + data.nbytes > self._size:
            raise OSError('Attempt to write more data to the stream than the '
                          'header specified.')

        if BITPIX2DTYPE[self._header['BITPIX']] != data.dtype.name:
            raise TypeError('Supplied data does not match the type specified '
                            'in the header.')

        if data.dtype.str[0] != '>':
            # byteswap little endian arrays before writing
            output = data.byteswap()
        else:
            output = data

        self._ffo.writearray(output)

        if self._ffo.tell() - self._data_offset == self._size:
            # the stream is full so pad the data to the next FITS block
            self._ffo.write(_pad_length(self._size) * '\0')
            self.writecomplete = True

        self._ffo.flush()

        return self.writecomplete
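
A minimal usage sketch, assuming this write() belongs to astropy.io.fits.StreamingHDU (the file name, image dimensions, and chunk size below are illustrative):

import numpy as np
from astropy.io import fits

# Header promising a 100 x 100 int32 image (40,000 bytes of data in total).
header = fits.Header()
header['SIMPLE'] = True
header['BITPIX'] = 32
header['NAXIS'] = 2
header['NAXIS1'] = 100
header['NAXIS2'] = 100

# The header is written up front; the data can then be streamed in chunks.
shdu = fits.StreamingHDU('streamed.fits', header)
for _ in range(10):
    chunk = np.zeros((10, 100), dtype=np.int32)   # dtype must match BITPIX
    complete = shdu.write(chunk)                  # True once all 40,000 bytes arrive
shdu.close()

Once the amount promised by the header has been written, the stream is padded out to a full 2880-byte FITS block and any further write() raises OSError, as described in the Notes above.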
Example #2
    def fromhdulist(cls, hdulist, compress=False):
        """
        Creates a new FitsHDU from a given HDUList object.

        Parameters
        ----------
        hdulist : HDUList
A valid HDUList object.
        compress : bool, optional
            Gzip compress the FITS file
        """

        fileobj = bs = io.BytesIO()
        if compress:
            if hasattr(hdulist, '_file'):
                name = fileobj_name(hdulist._file)
            else:
                name = None
            fileobj = gzip.GzipFile(name, mode='wb', fileobj=bs)

        hdulist.writeto(fileobj)

        if compress:
            fileobj.close()

        # A proper HDUList should still be padded out to a multiple of 2880
        # technically speaking
        padding = (_pad_length(bs.tell()) * cls._padding_byte).encode('ascii')
        bs.write(padding)

        bs.seek(0)

        cards = [
            ('XTENSION', cls._extension, 'FITS extension'),
            ('BITPIX', 8, 'array data type'),
            ('NAXIS', 1, 'number of array dimensions'),
            ('NAXIS1', len(bs.getvalue()), 'Axis length'),
            ('PCOUNT', 0, 'number of parameters'),
            ('GCOUNT', 1, 'number of groups'),
        ]

        # Add the XINDn keywords proposed by Perry, though nothing is done with
        # these at the moment
        if len(hdulist) > 1:
            for idx, hdu in enumerate(hdulist[1:]):
                cards.append(('XIND' + str(idx + 1), hdu._header_offset,
                              f'byte offset of extension {idx + 1}'))

        cards.append(('COMPRESS', compress, 'Uses gzip compression'))
        header = Header(cards)
        return cls._readfrom_internal(_File(bs), header=header)
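
A minimal usage sketch, assuming this classmethod is FitsHDU.fromhdulist from astropy.io.fits (the file name and wrapped HDUs are illustrative; the hdulist attribute used at the end to unpack the extension is assumed to be provided by the same class):

import numpy as np
from astropy.io import fits

# An ordinary HDUList that we want to embed whole inside a single extension.
inner = fits.HDUList([fits.PrimaryHDU(np.arange(10)),
                      fits.ImageHDU(np.zeros((5, 5)))])

# Wrap it; with compress=True the serialized bytes are gzipped first and the
# COMPRESS keyword records that fact in the wrapper's header.
wrapper = fits.FitsHDU.fromhdulist(inner, compress=True)

# The wrapper behaves like any other extension HDU.
outer = fits.HDUList([fits.PrimaryHDU(), wrapper])
outer.writeto('container.fits', overwrite=True)

# Reading the file back gives a FitsHDU whose hdulist attribute reconstructs
# the embedded HDUList (decompressing it if COMPRESS was set).
with fits.open('container.fits') as hdus:
    recovered = hdus[1].hdulist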
Example #3
    def from_buff(cls, buff, compress=False, **kwargs):
        """
Creates a new _AsdfHDU from a buffer containing a serialized ASDF file.

        Parameters
        ----------
        buff : io.BytesIO
            A buffer containing an ASDF metadata tree
        compress : bool, optional
            Gzip compress the contents of the ASDF HDU
        """

        if compress:
            buff = gzip.GzipFile(fileobj=buff, mode='wb')

        # A proper HDU should still be padded out to a multiple of 2880
        # technically speaking
        data_length = buff.tell()
        padding = (_pad_length(data_length) *
                   cls._padding_byte).encode('ascii')
        buff.write(padding)

        buff.seek(0)

        cards = [
            ('XTENSION', cls._extension, 'ASDF extension'),
            ('BITPIX', 8, 'array data type'),
            ('NAXIS', 1, 'number of array dimensions'),
            ('NAXIS1', data_length, 'Axis length'),
            ('PCOUNT', 0, 'number of parameters'),
            ('GCOUNT', 1, 'number of groups'),
            ('COMPRESS', compress, 'Uses gzip compression'),
            ('EXTNAME', cls._extension, 'Name of ASDF extension'),
        ]

        header = Header(cards)
        return cls._readfrom_internal(_File(buff), header=header)
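
A minimal usage sketch, assuming this is the private _AsdfHDU.from_buff helper from the asdf package's FITS-embedding support. The import path below is an assumption (it depends on the asdf version), and in normal use this class is driven indirectly through asdf.fits_embed.AsdfInFits rather than called by hand:

import io
from asdf import AsdfFile
from asdf.fits_embed import _AsdfHDU   # assumed location of the private class

# Serialize an ASDF tree into an in-memory buffer of the kind from_buff expects.
buff = io.BytesIO()
AsdfFile({'data': [1, 2, 3]}).write_to(buff)

# from_buff reads the data length via tell(), so leave the position at the end.
buff.seek(0, io.SEEK_END)

asdf_hdu = _AsdfHDU.from_buff(buff, compress=False)
# asdf_hdu can now be appended to an astropy HDUList like any other extension.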