def fromhdulist(cls, hdulist, compress=False):
    """
    Creates a new FitsHDU from a given HDUList object.

    Parameters
    ----------
    hdulist : HDUList
        A valid Headerlet object.

    compress : bool (optional)
        Gzip compress the FITS file
    """

    bs = BytesIO()
    if compress:
        # Record the original file's name (if it has one) in the gzip
        # stream's header.
        if hasattr(hdulist, '_file'):
            name = fileobj_name(hdulist._file)
        else:
            name = None
        fileobj = gzip.GzipFile(name, mode='wb', fileobj=bs)
    else:
        fileobj = bs

    # Serialize the entire HDUList into the in-memory buffer (possibly
    # through the gzip wrapper).
    hdulist.writeto(fileobj)

    if compress:
        # Flush the gzip trailer; the underlying BytesIO stays open.
        fileobj.close()
    bs.seek(0)

    cards = [
        ('XTENSION', cls._extension, 'FITS extension'),
        ('BITPIX', 8, 'array data type'),
        ('NAXIS', 1, 'number of array dimensions'),
        ('NAXIS1', len(bs.getvalue()), 'Axis length'),
        ('PCOUNT', 0, 'number of parameters'),
        ('GCOUNT', 1, 'number of groups'),
    ]

    # Add the XINDn keywords proposed by Perry, though nothing is done
    # with these at the moment
    for idx, hdu in enumerate(hdulist[1:]):
        extnum = idx + 1
        cards.append(('XIND' + str(extnum), hdu._hdrLoc,
                      'byte offset of extension %d' % extnum))

    cards.append(('COMPRESS', compress, 'Uses gzip compression'))
    header = Header(cards)

    # TODO: This wrapping of the fileobj should probably be handled by
    # cls.fromstring, though cls.fromstring itself has a strange
    # implementation that I probably need to fix.  For example, it
    # shouldn't care about fileobjs.  There should be a _BaseHDU.fromfile
    # for that (there is _BaseHDU.readfrom which plays that role, but its
    # semantics are also a little unclear...)
    return cls.fromstring(header, fileobj=_File(bs))
def _stat_filename_or_fileobj(filename):
    """
    Inspect *filename* (a path string or an open file-like object) and
    return a ``(name, closed, noexist_or_empty)`` tuple describing it:
    the resolved file name (empty string if none), whether the object is
    closed, and whether the target does not exist yet or is empty.
    """

    closed = fileobj_closed(filename)
    name = fileobj_name(filename) or ''

    # File-like objects without tell() are treated as positioned at 0.
    try:
        loc = filename.tell()
    except AttributeError:
        loc = 0

    if name:
        # Named target: "new" if missing on disk or zero bytes long.
        noexist_or_empty = (not os.path.exists(name) or
                            os.path.getsize(name) == 0)
    else:
        # Anonymous file-like object: "new" if positioned at the start.
        noexist_or_empty = (loc == 0)

    return name, closed, noexist_or_empty
def writeto(self, fileobj, output_verify='exception', clobber=False,
            checksum=False):
    """
    Write the `HDUList` to a new file.

    Parameters
    ----------
    fileobj : file path, file object or file-like object
        File to write to.  If a file object, must be opened for
        append (ab+).

    output_verify : str
        Output verification option.  Must be one of ``"fix"``,
        ``"silentfix"``, ``"ignore"``, ``"warn"``, or
        ``"exception"``.  See :ref:`verify` for more info.

    clobber : bool
        When `True`, overwrite the output file if exists.

    checksum : bool
        When `True` adds both ``DATASUM`` and ``CHECKSUM`` cards to
        the headers of all HDU's written to the file.
    """

    if (len(self) == 0):
        warnings.warn("There is nothing to write.")
        return

    self.verify(option=output_verify)

    # check if the file object is closed
    closed = fileobj_closed(fileobj)
    fmode = fileobj_mode(fileobj) or 'ab+'
    filename = fileobj_name(fileobj)

    # check if the output file already exists
    if (isfile(fileobj) or
            isinstance(fileobj, (basestring, gzip.GzipFile))):
        if (os.path.exists(filename) and os.path.getsize(filename) != 0):
            if clobber:
                warnings.warn("Overwriting existing file '%s'." % filename)
                if not closed:
                    fileobj.close()
                os.remove(filename)
            else:
                raise IOError("File '%s' already exists." % filename)
    elif (hasattr(fileobj, 'len') and fileobj.len > 0):
        if clobber:
            warnings.warn("Overwriting existing file '%s'." % filename)
            # BUG FIX: previously this called name.truncate(0), but no
            # local `name` exists (NameError); the in-memory file-like
            # object itself must be truncated to discard its contents.
            fileobj.truncate(0)
        else:
            raise IOError("File '%s' already exists." % filename)

    # make sure the EXTEND keyword is there if there is extension
    self.update_extend()

    # Map the underlying file's open-mode string back to a pyfits mode
    # keyword so the output file is opened compatibly.
    mode = 'copyonwrite'
    for key, val in PYTHON_MODES.iteritems():
        if val == fmode:
            mode = key
            break

    hdulist = fitsopen(fileobj, mode=mode)

    for hdu in self:
        hdu._prewriteto(checksum=checksum)
        try:
            hdu._writeto(hdulist.__file)
        finally:
            # Always undo the pre-write state even if the write fails.
            hdu._postwriteto()
    hdulist.close(output_verify=output_verify, closed=closed)
def __init__(self, name, header):
    """
    Construct a `StreamingHDU` object given a file name and a header.

    Parameters
    ----------
    name : file path, file object, or file like object
        The file to which the header and data will be streamed.  If
        opened, the file object must be opened for append (ab+).

    header : `Header` instance
        The header object associated with the data to be written to
        the file.

    Notes
    -----
    The file will be opened and the header appended to the end of the
    file.  If the file does not already exist, it will be created,
    and if the header represents a Primary header, it will be written
    to the beginning of the file.  If the file does not exist and the
    provided header is not a Primary header, a default Primary HDU
    will be inserted at the beginning of the file and the provided
    header will be added as the first extension.  If the file does
    already exist, but the provided header represents a Primary
    header, the header will be modified to an image extension header
    and appended to the end of the file.
    """

    if isinstance(name, gzip.GzipFile):
        raise TypeError('StreamingHDU not supported for GzipFile objects.')

    # Work on a copy so the caller's header object is never mutated.
    self._header = header.copy()

    # handle a file object instead of a file name
    filename = fileobj_name(name) or ''

    # Decide whether the target file is brand new (or empty).  When it
    # is, and the given header is not a Primary header, a default
    # PrimaryHDU has to be prepended before the header is streamed out.
    newfile = False
    if filename:
        if not os.path.exists(filename) or os.path.getsize(filename) == 0:
            newfile = True
    elif hasattr(name, 'len') and name.len == 0:
        newfile = True

    if newfile:
        if 'SIMPLE' not in self._header:
            hdulist = HDUList([PrimaryHDU()])
            hdulist.writeto(name, 'exception')
    else:
        # This will not be the first extension in the file so we must
        # change the Primary header provided into an image extension
        # header.
        if 'SIMPLE' in self._header:
            self._header.set('XTENSION', 'IMAGE', 'Image extension',
                             after='SIMPLE')
            del self._header['SIMPLE']

        if 'PCOUNT' not in self._header:
            # PCOUNT must follow the last NAXISn card (or NAXIS itself
            # when there are no axes).
            dim = self._header['NAXIS']
            dim = '' if dim == 0 else str(dim)
            self._header.set('PCOUNT', 0, 'number of parameters',
                             after='NAXIS' + dim)

        if 'GCOUNT' not in self._header:
            self._header.set('GCOUNT', 1, 'number of groups',
                             after='PCOUNT')

    self._ffo = _File(name, 'append')

    # TODO : Fix this once the HDU writing API is cleaned up
    tmp_hdu = _BaseHDU()
    # Passing self._header as an argument to _BaseHDU() will cause its
    # values to be modified in undesired ways...need to have a better
    # way of doing this
    tmp_hdu._header = self._header
    self._header_offset = tmp_hdu._writeheader(self._ffo)[0]
    self._data_offset = self._ffo.tell()
    self._size = self.size

    # Nothing remains to stream when the header describes no data.
    self.writecomplete = self._size == 0
def __init__(self, fileobj=None, mode='readonly', memmap=False):
    """
    Open (or wrap) *fileobj* for FITS I/O.

    Parameters
    ----------
    fileobj : str, file object, file-like object, or None
        The file to wrap.  If `None`, the object is put into a
        simulation-only state with no backing file at all.
    mode : str
        One of the keys of ``PYTHON_MODES`` (e.g. ``'readonly'``,
        ``'update'``, ``'append'``, ``'ostream'``, ...).
    memmap : bool
        Whether memory-mapping is requested; silently disabled below
        when the underlying object is not an actual file.

    Raises
    ------
    ValueError
        If *mode* is unrecognized, or an open file's mode conflicts
        with the requested mode.
    IOError
        If a named file cannot be found/retrieved, or writing is
        requested on a compressed file, or a file-like object lacks the
        methods the mode requires.
    """

    if fileobj is None:
        # Simulation-only: no real file is attached.
        self.__file = None
        self.closed = False
        self.mode = mode
        self.memmap = memmap
        self.compression = None
        self.readonly = False
        self.writeonly = False
        self.simulateonly = True
        return
    else:
        self.simulateonly = False

    if mode not in PYTHON_MODES:
        raise ValueError("Mode '%s' not recognized" % mode)

    if (isinstance(fileobj, basestring) and mode != 'append' and
            not os.path.exists(fileobj) and
            not os.path.splitdrive(fileobj)[0]):
        #
        # Not writing file and file does not exist on local machine and
        # name does not begin with a drive letter (Windows), try to
        # get it over the web.
        #
        try:
            self.name, _ = urllib.urlretrieve(fileobj)
        except (TypeError, ValueError):
            # BUG FIX: urlretrieve can raise TypeError/ValueError for a
            # plain nonexistent filename (particularly on Python 3);
            # surface that as a clear IOError instead of letting the
            # raw exception propagate.
            raise IOError('File does not exist: %r' % fileobj)
    else:
        self.name = fileobj_name(fileobj)

    self.closed = False
    self.mode = mode
    self.memmap = memmap

    # Underlying fileobj is a file-like object, but an actual file object
    self.file_like = False

    # More defaults to be adjusted below as necessary
    self.compression = None
    self.readonly = False
    self.writeonly = False

    # Initialize the internal self.__file object
    if isfile(fileobj) or isinstance(fileobj, gzip.GzipFile):
        closed = fileobj_closed(fileobj)
        fmode = fileobj_mode(fileobj) or PYTHON_MODES[mode]
        if not closed:
            # In some cases (like on Python 3) a file opened for
            # appending still shows a mode of 'r+', hence the extra
            # check for the append case
            if ((mode == 'append' and fmode not in ('ab+', 'rb+')) or
                    (mode != 'append' and PYTHON_MODES[mode] != fmode)):
                raise ValueError(
                    "Input mode '%s' (%s) does not match mode of the "
                    "input file (%s)." % (mode, PYTHON_MODES[mode],
                                          fmode))
            self.__file = fileobj
        elif isfile(fileobj):
            self.__file = fileobj_open(self.name, PYTHON_MODES[mode])
            # Return to the beginning of the file--in Python 3 when
            # opening in append mode the file pointer is at the end of
            # the file
            self.__file.seek(0)
        else:
            # A closed GzipFile: reopen it by name.
            self.__file = gzip.open(self.name, PYTHON_MODES[mode])
    elif isinstance(fileobj, basestring):
        # Sniff the file's magic number to detect compression.
        if os.path.exists(self.name):
            with fileobj_open(self.name, 'rb') as f:
                magic = f.read(4)
        else:
            magic = ''.encode('raw-unicode-escape')
        ext = os.path.splitext(self.name)[1]
        if ext == '.gz' or magic.startswith(GZIP_MAGIC):
            # Handle gzip files
            if mode in ['update', 'append']:
                raise IOError(
                    "Writing to gzipped fits files is not currently "
                    "supported")
            self.__file = gzip.open(self.name)
            self.compression = 'gzip'
        elif ext == '.zip' or magic.startswith(PKZIP_MAGIC):
            # Handle zip files
            if mode in ['update', 'append']:
                raise IOError(
                    "Writing to zipped fits files is not currently "
                    "supported")
            zfile = zipfile.ZipFile(self.name)
            namelist = zfile.namelist()
            if len(namelist) != 1:
                raise IOError(
                    "Zip files with multiple members are not supported.")
            # Extract the single member to a temporary file and use that.
            self.__file = tempfile.NamedTemporaryFile(suffix='.fits')
            self.__file.write(zfile.read(namelist[0]))
            zfile.close()
            self.compression = 'zip'
        else:
            self.__file = fileobj_open(self.name, PYTHON_MODES[mode])
            # Make certain we're back at the beginning of the file
            self.__file.seek(0)
    else:
        # We are dealing with a file like object.
        # Assume it is open.
        self.file_like = True
        self.__file = fileobj

        # If there is not seek or tell methods then set the mode to
        # output streaming.
        if (not hasattr(self.__file, 'seek') or
                not hasattr(self.__file, 'tell')):
            self.mode = mode = 'ostream'

        if (self.mode in ('copyonwrite', 'update', 'append') and
                not hasattr(self.__file, 'write')):
            raise IOError("File-like object does not have a 'write' "
                          "method, required for mode '%s'." % self.mode)

        if (self.mode in ('readonly', 'denywrite') and
                not hasattr(self.__file, 'read')):
            raise IOError("File-like object does not have a 'read' "
                          "method, required for mode %r." % self.mode)

    if isinstance(fileobj, gzip.GzipFile):
        self.compression = 'gzip'
    elif isinstance(fileobj, zipfile.ZipFile):
        # Reading from zip files is supported but not writing (yet)
        self.compression = 'zip'

    if (mode in ('readonly', 'copyonwrite', 'denywrite') or
            (self.compression and mode == 'update')):
        self.readonly = True
    elif (mode == 'ostream' or
            (self.compression and mode == 'append')):
        self.writeonly = True

    # For 'ab+' mode, the pointer is at the end after the open in
    # Linux, but is at the beginning in Solaris.
    if (mode == 'ostream' or self.compression or
            not hasattr(self.__file, 'seek')):
        # For output stream start with a truncated file.
        # For compressed files we can't really guess at the size
        self.size = 0
    else:
        pos = self.__file.tell()
        self.__file.seek(0, 2)
        self.size = self.__file.tell()
        self.__file.seek(pos)

    if self.memmap and not isfile(self.__file):
        self.memmap = False
def __init__(self, fileobj=None, mode='readonly', memmap=False):
    """
    Wrap *fileobj* (a path, file object, or file-like object) for FITS
    I/O, delegating the actual opening to one of the ``_open_*``
    helpers.  When *fileobj* is `None`, the object is placed in a
    simulation-only state with no backing file.
    """

    if fileobj is None:
        # Simulation-only mode: establish defaults and bail out early.
        self.__file = None
        self.closed = False
        self.mode = mode
        self.memmap = memmap
        self.compression = None
        self.readonly = False
        self.writeonly = False
        self.simulateonly = True
        return

    self.simulateonly = False

    if mode not in PYTHON_MODES:
        raise ValueError("Mode '%s' not recognized" % mode)

    fetch_remote = (isinstance(fileobj, basestring) and
                    mode != 'append' and
                    not os.path.exists(fileobj) and
                    not os.path.splitdrive(fileobj)[0])
    if fetch_remote:
        # Not writing, the file does not exist locally, and the name has
        # no Windows drive letter -- try retrieving it over the web.
        try:
            self.name, _ = urllib.urlretrieve(fileobj)
        except (TypeError, ValueError):
            # A couple different exceptions can occur here when passing
            # a filename into urlretrieve in Python 3
            raise IOError('File does not exist: %r' % fileobj)
    else:
        self.name = fileobj_name(fileobj)

    self.closed = False
    self.mode = mode
    self.memmap = memmap

    # Underlying fileobj is a file-like object, but an actual file object
    self.file_like = False

    # More defaults to be adjusted below as necessary
    self.compression = None
    self.readonly = False
    self.writeonly = False

    # Initialize the internal self.__file object via the appropriate
    # helper for the kind of object we were handed.
    if isfile(fileobj) or isinstance(fileobj, gzip.GzipFile):
        self._open_fileobj(fileobj, mode)
    elif isinstance(fileobj, basestring):
        self._open_filename(fileobj, mode)
    else:
        self._open_filelike(fileobj, mode)

    if isinstance(fileobj, gzip.GzipFile):
        self.compression = 'gzip'
    elif isinstance(fileobj, zipfile.ZipFile):
        # Reading from zip files is supported but not writing (yet)
        self.compression = 'zip'

    read_only_modes = ('readonly', 'copyonwrite', 'denywrite')
    if (mode in read_only_modes or
            (self.compression and mode == 'update')):
        self.readonly = True
    elif mode == 'ostream' or (self.compression and mode == 'append'):
        self.writeonly = True

    # For 'ab+' mode, the pointer is at the end after the open in
    # Linux, but is at the beginning in Solaris.
    if (mode == 'ostream' or self.compression or
            not hasattr(self.__file, 'seek')):
        # For output stream start with a truncated file.
        # For compressed files we can't really guess at the size
        self.size = 0
    else:
        # Measure the file's size without disturbing the current
        # position.
        pos = self.__file.tell()
        self.__file.seek(0, 2)
        self.size = self.__file.tell()
        self.__file.seek(pos)

    # Memory-mapping only makes sense for a real on-disk file object.
    if self.memmap and not isfile(self.__file):
        self.memmap = False