@classmethod
def fromhdulist(cls, hdulist, compress=False):
    """
    Creates a new FitsHDU from a given HDUList object.

    Parameters
    ----------
    hdulist : HDUList
        A valid HDUList object.
    compress : bool, optional
        Gzip compress the FITS file
    """
    fileobj = bs = io.BytesIO()
    if compress:
        if hasattr(hdulist, '_file'):
            name = fileobj_name(hdulist._file)
        else:
            name = None
        fileobj = gzip.GzipFile(name, mode='wb', fileobj=bs)

    hdulist.writeto(fileobj)

    if compress:
        fileobj.close()

    # A proper HDUList should still be padded out to a multiple of 2880
    # technically speaking
    padding = (_pad_length(bs.tell()) * cls._padding_byte).encode('ascii')
    bs.write(padding)
    bs.seek(0)

    cards = [
        ('XTENSION', cls._extension, 'FITS extension'),
        ('BITPIX', 8, 'array data type'),
        ('NAXIS', 1, 'number of array dimensions'),
        ('NAXIS1', len(bs.getvalue()), 'Axis length'),
        ('PCOUNT', 0, 'number of parameters'),
        ('GCOUNT', 1, 'number of groups'),
    ]

    # Add the XINDn keywords proposed by Perry, though nothing is done with
    # these at the moment
    if len(hdulist) > 1:
        for idx, hdu in enumerate(hdulist[1:]):
            cards.append(('XIND' + str(idx + 1), hdu._header_offset,
                          f'byte offset of extension {idx + 1}'))

    cards.append(('COMPRESS', compress, 'Uses gzip compression'))
    header = Header(cards)
    return cls._readfrom_internal(_File(bs), header=header)
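
# Usage sketch (not part of the method above): a minimal, illustrative call
# to fromhdulist(), assuming the class is astropy.io.fits.FitsHDU or an
# equivalent class exposing this classmethod. Nothing is written to disk.
def _example_fromhdulist():
    from astropy.io import fits

    # Build a small in-memory HDUList and wrap it in a FitsHDU extension.
    inner = fits.HDUList([fits.PrimaryHDU()])
    fits_hdu = fits.FitsHDU.fromhdulist(inner, compress=True)

    # The wrapper behaves like any other extension HDU and can be appended
    # to an outer file, e.g. fits.HDUList([fits.PrimaryHDU(), fits_hdu]).
    return fits_hdu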
@classmethod
def from_buff(cls, buff, compress=False, **kwargs):
    """
    Creates a new _AsdfHDU from a buffer containing an ASDF metadata tree.

    Parameters
    ----------
    buff : io.BytesIO
        A buffer containing an ASDF metadata tree
    compress : bool, optional
        Gzip compress the contents of the ASDF HDU
    """
    if compress:
        buff = gzip.GzipFile(fileobj=buff, mode='wb')

    # A proper HDU should still be padded out to a multiple of 2880
    # technically speaking
    data_length = buff.tell()
    padding = (_pad_length(data_length) * cls._padding_byte).encode('ascii')
    buff.write(padding)
    buff.seek(0)

    cards = [
        ('XTENSION', cls._extension, 'ASDF extension'),
        ('BITPIX', 8, 'array data type'),
        ('NAXIS', 1, 'number of array dimensions'),
        ('NAXIS1', data_length, 'Axis length'),
        ('PCOUNT', 0, 'number of parameters'),
        ('GCOUNT', 1, 'number of groups'),
        ('COMPRESS', compress, 'Uses gzip compression'),
        ('EXTNAME', cls._extension, 'Name of ASDF extension'),
    ]

    header = Header(cards)
    return cls._readfrom_internal(_File(buff), header=header)
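
# Usage sketch (not part of the method above): ``asdf_hdu_cls`` stands in for
# whatever class defines the from_buff() classmethod, since its import path
# is not shown here, and the payload bytes below are a stand-in rather than a
# real serialized ASDF tree.
def _example_from_buff(asdf_hdu_cls):
    import io

    buff = io.BytesIO()
    buff.write(b'...serialized ASDF tree bytes...')
    # from_buff() uses buff.tell() as the data length, so the buffer is
    # handed over still positioned at the end of the written payload.
    return asdf_hdu_cls.from_buff(buff, compress=False)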
def __init__(self, name, header):
    """
    Construct a `StreamingHDU` object given a file name and a header.

    Parameters
    ----------
    name : path-like or file-like
        The file to which the header and data will be streamed.  If opened,
        the file object must be opened in a writeable binary mode such as
        'wb' or 'ab+'.

    header : `Header` instance
        The header object associated with the data to be written to the
        file.

    Notes
    -----
    The file will be opened and the header appended to the end of the file.
    If the file does not already exist, it will be created, and if the
    header represents a Primary header, it will be written to the beginning
    of the file.  If the file does not exist and the provided header is not
    a Primary header, a default Primary HDU will be inserted at the
    beginning of the file and the provided header will be added as the
    first extension.  If the file does already exist, but the provided
    header represents a Primary header, the header will be modified to an
    image extension header and appended to the end of the file.
    """
    if isinstance(name, gzip.GzipFile):
        raise TypeError('StreamingHDU not supported for GzipFile objects.')

    self._header = header.copy()

    # handle a file object instead of a file name
    filename = fileobj_name(name) or ''

    # Check if the file already exists.  If it does not, check to see
    # if we were provided with a Primary Header.  If not we will need
    # to prepend a default PrimaryHDU to the file before writing the
    # given header.
    newfile = False

    if filename:
        if not os.path.exists(filename) or os.path.getsize(filename) == 0:
            newfile = True
    elif (hasattr(name, 'len') and name.len == 0):
        newfile = True

    if newfile:
        if 'SIMPLE' not in self._header:
            hdulist = HDUList([PrimaryHDU()])
            hdulist.writeto(name, 'exception')
    else:
        # This will not be the first extension in the file so we
        # must change the Primary header provided into an image
        # extension header.
        if 'SIMPLE' in self._header:
            self._header.set('XTENSION', 'IMAGE', 'Image extension',
                             after='SIMPLE')
            del self._header['SIMPLE']

        if 'PCOUNT' not in self._header:
            dim = self._header['NAXIS']

            if dim == 0:
                dim = ''
            else:
                dim = str(dim)

            self._header.set('PCOUNT', 0, 'number of parameters',
                             after='NAXIS' + dim)

        if 'GCOUNT' not in self._header:
            self._header.set('GCOUNT', 1, 'number of groups',
                             after='PCOUNT')

    self._ffo = _File(name, 'append')

    # TODO : Fix this once the HDU writing API is cleaned up
    tmp_hdu = _BaseHDU()
    # Passing self._header as an argument to _BaseHDU() will cause its
    # values to be modified in undesired ways...need to have a better way
    # of doing this
    tmp_hdu._header = self._header
    self._header_offset = tmp_hdu._writeheader(self._ffo)[0]
    self._data_offset = self._ffo.tell()
    self._size = self.size

    if self._size != 0:
        self.writecomplete = False
    else:
        self.writecomplete = True
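
# Usage sketch (not part of the constructor above): streams a 100x100 int32
# image to disk one row at a time instead of building the full array in
# memory. Assumes astropy.io.fits.StreamingHDU; the output name
# 'streamed.fits' is an arbitrary example.
def _example_streaminghdu():
    import numpy as np
    from astropy.io import fits

    hdr = fits.Header()
    hdr['SIMPLE'] = True
    hdr['BITPIX'] = 32       # 32-bit integer pixels
    hdr['NAXIS'] = 2
    hdr['NAXIS1'] = 100
    hdr['NAXIS2'] = 100

    shdu = fits.StreamingHDU('streamed.fits', hdr)
    for _ in range(100):
        shdu.write(np.zeros(100, dtype=np.int32))  # one row per call
    shdu.close()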