Example #1
    def fromhdulist(cls, hdulist, compress=False):
        """
        Creates a new FitsHDU from a given HDUList object.

        Parameters
        ----------
        hdulist : HDUList
            A valid HDUList object.
        compress : bool (optional)
            Gzip compress the FITS file.
        """

        fileobj = bs = BytesIO()
        if compress:
            if hasattr(hdulist, '_file'):
                name = fileobj_name(hdulist._file)
            else:
                name = None
            fileobj = gzip.GzipFile(name, mode='wb', fileobj=bs)
        hdulist.writeto(fileobj)
        if compress:
            fileobj.close()
        bs.seek(0)

        cards = [
            ('XTENSION',  cls._extension, 'FITS extension'),
            ('BITPIX',    8, 'array data type'),
            ('NAXIS',     1, 'number of array dimensions'),
            ('NAXIS1',    len(bs.getvalue()), 'Axis length'),
            ('PCOUNT',    0, 'number of parameters'),
            ('GCOUNT',    1, 'number of groups'),
        ]

        # Add the XINDn keywords proposed by Perry, though nothing is done with
        # these at the moment
        if len(hdulist) > 1:
            for idx, hdu in enumerate(hdulist[1:]):
                cards.append(('XIND' + str(idx + 1), hdu._hdrLoc,
                              'byte offset of extension %d' % (idx + 1)))

        cards.append(('COMPRESS',  compress, 'Uses gzip compression'))
        header = Header(cards)
        # TODO: This wrapping of the fileobj should probably be handled by
        # cls.fromstring, though cls.fromstring itself has a strange
        # implementation that I probably need to fix.  For example, it
        # shouldn't care about fileobjs.  There should be a _BaseHDU.fromfile
        # for that (there is _BaseHDU.readfrom which plays that role, but its
        # semantics are also a little unclear...)
        return cls.fromstring(header, fileobj=_File(bs))
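
A minimal usage sketch for the classmethod above; the FitsHDU import path and the file names are assumptions (in recent PyFITS versions the class lives in pyfits.hdu.nonstandard):

import numpy as np
import pyfits
from pyfits.hdu.nonstandard import FitsHDU  # import path assumed

# Wrap an entire HDUList inside a single extension of another FITS file
inner = pyfits.HDUList([pyfits.PrimaryHDU(data=np.arange(10))])
wrapped = FitsHDU.fromhdulist(inner, compress=True)

outer = pyfits.HDUList([pyfits.PrimaryHDU(), wrapped])
outer.writeto('wrapped.fits')  # hypothetical output file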
Example #2
def append(filename, data, header=None, checksum=False, verify=True, **kwargs):
    """
    Append the header/data to a FITS file if filename exists; create it if not.

    If only `data` is supplied, a minimal header is created.

    Parameters
    ----------
    filename : file path, file object, or file-like object
        File to write to.  If opened, must be opened for update (rb+)
        unless it is a new file, then it must be opened for append
        (ab+).  A file or `GzipFile` object opened for update will be
        closed after return.

    data : array, table, or group data object
        The new data used for appending.

    header : `Header` object (optional)
        The header associated with `data`.  If `None`, an appropriate
        header will be created for the data object supplied.

    checksum : bool (optional)
        When `True` adds both ``DATASUM`` and ``CHECKSUM`` cards to
        the header of the HDU when written to the file.

    verify : bool (optional)
        When `True`, the existing FITS file will be read in to verify
        it for correctness before appending.  When `False`, content is
        simply appended to the end of the file.  Setting *verify* to
        `False` can be much faster.

    kwargs
        Any additional keyword arguments to be passed to `pyfits.open`.
    """

    name, closed, noexist_or_empty = _stat_filename_or_fileobj(filename)

    if noexist_or_empty:
        #
        # The input file or file-like object either doesn't exist or is
        # empty.  Use the writeto convenience function to write the
        # output to the empty object.
        #
        writeto(filename, data, header, checksum=checksum, **kwargs)
    else:
        hdu = _makehdu(data, header)

        if isinstance(hdu, PrimaryHDU):
            hdu = ImageHDU(data, header)

        if verify or not closed:
            f = fitsopen(filename, mode='append')
            f.append(hdu)

            # Set a flag in the HDU so that only this HDU gets a checksum when
            # writing the file.
            hdu._output_checksum = checksum
            f.close(closed=closed)
        else:
            f = _File(filename, mode='append')
            hdu._output_checksum = checksum
            hdu._writeto(f)
            f.close()
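
A short usage sketch (the file name is hypothetical). The first call finds no existing file and takes the writeto path; the second appends an ImageHDU, and with verify=False plus a closed file on disk it takes the faster direct-append branch:

import numpy as np
import pyfits

data = np.arange(100, dtype=np.float32)
pyfits.append('example.fits', data)        # creates the file
pyfits.append('example.fits', data * 2,    # appends an ImageHDU
              checksum=True, verify=False)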
Example #3
    def _readfrom(cls, fileobj=None, data=None, mode='readonly',
                  memmap=False, save_backup=False, **kwargs):
        """
        Provides the common implementation for HDUList.fromfile and
        HDUList.fromstring, both of which wrap this method, as their
        implementations are largely the same.
        """

        if fileobj is not None:
            # instantiate a FITS file object (ffo)
            ffo = _File(fileobj, mode=mode, memmap=memmap)
            hdulist = cls(file=ffo)
        else:
            hdulist = cls()
            # This method is currently only called from HDUList.fromstring and
            # HDUList.fromfile.  If fileobj is None then this must be the
            # fromstring case; the data type of `data` will be checked in the
            # _BaseHDU.fromstring call.

        hdulist._save_backup = save_backup

        saved_compression_enabled = compressed.COMPRESSION_ENABLED

        try:
            if kwargs.get('disable_image_compression'):
                compressed.COMPRESSION_ENABLED = False

            if mode == 'ostream':
                # Output stream--not interested in reading/parsing the
                # HDUs--just writing to the output file
                return hdulist

            # read all HDUs
            while True:
                try:
                    if fileobj is not None:
                        try:
                            hdu = _BaseHDU.readfrom(ffo, **kwargs)
                        except EOFError:
                            break
                        except IOError:
                            if ffo.writeonly:
                                break
                            else:
                                raise
                    else:
                        if not data:
                            break
                        hdu = _BaseHDU.fromstring(data)
                        data = data[hdu._datLoc + hdu._datSpan:]
                    hdulist.append(hdu)
                    hdu._new = False
                    if 'checksum' in kwargs:
                        hdu._output_checksum = kwargs['checksum']
                # Check for the case where there is extra space after the
                # last HDU or the HDU is corrupted
                except (VerifyError, ValueError) as err:
                    warnings.warn(
                        'Error validating header for HDU #%d (note: PyFITS '
                        'uses zero-based indexing).\n%s\n'
                        'There may be extra bytes after the last HDU or the '
                        'file is corrupted.' %
                        (len(hdulist), indent(str(err))), VerifyWarning)
                    break
        finally:
            # Restore the module-level compression flag saved above
            compressed.COMPRESSION_ENABLED = saved_compression_enabled

        return hdulist
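
Both public entry points funnel into this method; a sketch of each, assuming a PyFITS version that exposes both classmethods (the file name is hypothetical):

import pyfits

# fromfile: open and lazily parse HDUs from a file on disk
hdulist = pyfits.HDUList.fromfile('example.fits', memmap=True)

# fromstring: parse a complete FITS file already held in memory
with open('example.fits', 'rb') as f:
    hdulist2 = pyfits.HDUList.fromstring(f.read())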
Example #4
    def _flush_resize(self):
        """
        Implements flushing changes in update mode when parts of one or more
        HDUs need to be resized.
        """

        old_name = self.__file.name
        old_memmap = self.__file.memmap
        name = _tmp_name(old_name)

        if not self.__file.file_like:
            old_mode = os.stat(old_name).st_mode
            # The underlying file is an actual file object.  The HDUList is
            # resized, so we need to write it to a tmp file, delete the
            # original file, and rename the tmp file to the original file.
            if self.__file.compression == 'gzip':
                new_file = gzip.GzipFile(name, mode='ab+')
            else:
                new_file = name

            hdulist = self.fromfile(new_file, mode='append')

            for hdu in self:
                hdu._writeto(hdulist.__file, inplace=True, copy=True)

            if sys.platform.startswith('win'):
                # Collect a list of open mmaps to the data; this will be used
                # later.  See below.
                mmaps = [(idx, _get_array_mmap(hdu.data), hdu.data)
                         for idx, hdu in enumerate(self) if hdu._data_loaded]

            hdulist.__file.close()
            self.__file.close()

            if sys.platform.startswith('win'):
                # Close all open mmaps to the data.  This is only necessary on
                # Windows, which will not allow a file to be renamed or deleted
                # until all handles to that file have been closed.
                for idx, mmap, arr in mmaps:
                    if mmap is not None:
                        mmap.close()

            os.remove(self.__file.name)

            # Rename the temporary file to the original file's name and
            # restore the original file's permissions
            os.rename(name, old_name)
            os.chmod(old_name, old_mode)

            # Reopen the renamed file with "update" mode
            if isinstance(new_file, gzip.GzipFile):
                old_file = gzip.GzipFile(old_name, mode='rb+')
            else:
                old_file = old_name

            ffo = _File(old_file, mode='update', memmap=old_memmap)

            self.__file = ffo

            for hdu in self:
                # Need to update the _file attribute and close any open mmaps
                # on each HDU
                if (hdu._data_loaded and
                    _get_array_mmap(hdu.data) is not None):
                    del hdu.data
                hdu._file = ffo

            if sys.platform.startswith('win'):
                # On Windows, all the original data mmaps were closed above.
                # However, it's possible that the user still has references to
                # the old data which would no longer work (possibly even cause
                # a segfault if they try to access it).  This replaces the
                # buffers used by the original arrays with the buffers of mmap
                # arrays created from the new file.  This seems to work, but
                # it's a flaming hack and carries no guarantees that it won't
                # lead to odd behavior in practice.  Better to just not keep
                # references to data from files that had to be resized upon
                # flushing (on Windows--again, this is no problem on Linux).
                for idx, mmap, arr in mmaps:
                    if mmap is not None:
                        arr.data = self[idx].data.data
                del mmaps  # Just to be sure

        else:
            # The underlying file is not an actual file object, but a
            # file-like object.  We can't write out to a new file on disk;
            # we must update the file-like object in place.  To do this, we
            # write out to a temporary file, then truncate the contents of
            # our file-like object, then write the contents of the temporary
            # file to the now-empty file-like object.
            self.writeto(name)
            hdulist = self.fromfile(name)
            ffo = self.__file

            ffo.truncate(0)
            ffo.seek(0)

            for hdu in hdulist:
                hdu._writeto(ffo, inplace=True, copy=True)

            # Close the temporary file and delete it.
            hdulist.close()
            os.remove(hdulist.__file.name)

        # reset the resize attributes after updating
        self._resize = False
        self._truncate = False
        for hdu in self:
            hdu._header._modified = False
            hdu._new = False
            hdu._file = ffo
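
This method is internal; it runs when a file opened in update mode is flushed after an HDU changed size. A sketch of a scenario that triggers it (the file name is hypothetical):

import pyfits

f = pyfits.open('example.fits', mode='update')
# Add enough HISTORY cards to push the header past a 2880-byte block
# boundary, which marks the HDUList as resized
for i in range(50):
    f[0].header.add_history('padding card %d' % i)
# The flush performed by close() now takes the _flush_resize path:
# write to a temp file, remove the original, rename the temp into place
f.close()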
Example #5
File: streaming.py  Project: coleb/PyFITS
    def __init__(self, name, header):
        """
        Construct a `StreamingHDU` object given a file name and a header.

        Parameters
        ----------
        name : file path, file object, or file-like object
            The file to which the header and data will be streamed.
            If opened, the file object must be opened for append
            (ab+).

        header : `Header` instance
            The header object associated with the data to be written
            to the file.

        Notes
        -----
        The file will be opened and the header appended to the end of
        the file.  If the file does not already exist, it will be
        created, and if the header represents a Primary header, it
        will be written to the beginning of the file.  If the file
        does not exist and the provided header is not a Primary
        header, a default Primary HDU will be inserted at the
        beginning of the file and the provided header will be added as
        the first extension.  If the file does already exist, but the
        provided header represents a Primary header, the header will
        be modified to an image extension header and appended to the
        end of the file.
        """

        if isinstance(name, gzip.GzipFile):
            raise TypeError('StreamingHDU not supported for GzipFile objects.')

        self._header = header.copy()

        # handle a file object instead of a file name
        filename = fileobj_name(name) or ''
        # Check if the file already exists.  If it does not, check to see
        # if we were provided with a Primary Header.  If not we will need
        # to prepend a default PrimaryHDU to the file before writing the
        # given header.
        newfile = False

        if filename:
            if not os.path.exists(filename) or os.path.getsize(filename) == 0:
                newfile = True
        elif (hasattr(name, 'len') and name.len == 0):
            newfile = True

        if newfile:
            if 'SIMPLE' not in self._header:
                hdulist = HDUList([PrimaryHDU()])
                hdulist.writeto(name, output_verify='exception')
        else:
            # This will not be the first extension in the file, so we
            # must change the Primary header provided into an image
            # extension header.
            if 'SIMPLE' in self._header:
                self._header.set('XTENSION', 'IMAGE', 'Image extension',
                                 after='SIMPLE')
                del self._header['SIMPLE']

                if 'PCOUNT' not in self._header:
                    dim = self._header['NAXIS']

                    if dim == 0:
                        dim = ''
                    else:
                        dim = str(dim)

                    self._header.set('PCOUNT', 0, 'number of parameters',
                                     after='NAXIS' + dim)

                if 'GCOUNT' not in self._header:
                    self._header.set('GCOUNT', 1, 'number of groups',
                                     after='PCOUNT')

        self._ffo = _File(name, 'append')

        # TODO : Fix this once the HDU writing API is cleaned up
        tmp_hdu = _BaseHDU()
        # Passing self._header as an argument to _BaseHDU() will cause its
        # values to be modified in undesired ways...need to have a better way
        # of doing this
        tmp_hdu._header = self._header
        self._header_offset = tmp_hdu._writeheader(self._ffo)[0]
        self._data_offset = self._ffo.tell()
        self._size = self.size

        self.writecomplete = self._size == 0
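
A usage sketch (header values and the file name are illustrative): the header must fully describe the eventual data up front, after which the data can be streamed in pieces:

import numpy as np
import pyfits

hdr = pyfits.Header()
hdr['SIMPLE'] = True
hdr['BITPIX'] = 32
hdr['NAXIS'] = 2
hdr['NAXIS1'] = 100
hdr['NAXIS2'] = 100

shdu = pyfits.StreamingHDU('stream.fits', hdr)
for _ in range(100):
    # Stream one row at a time; per the class's design, the write is
    # complete once NAXIS1 * NAXIS2 values have been written
    shdu.write(np.zeros(100, dtype='>i4'))
shdu.close()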