def test_create_groupdata(self):
    """Basic test for creating GroupData from scratch."""
    image = np.arange(100.0)
    image.shape = (10, 1, 1, 2, 5)
    par_a = np.arange(10, dtype=np.float32) + 0.1
    par_b = 42.0
    gdata = fits.hdu.groups.GroupData(image,
                                      parnames=['abc', 'xyz'],
                                      pardata=[par_a, par_b],
                                      bitpix=-32)

    assert gdata.parnames == ['abc', 'xyz']
    assert (gdata.par('abc') == par_a).all()
    # A scalar parameter is broadcast to every group.
    assert (gdata.par('xyz') == ([par_b] * len(gdata))).all()
    assert (gdata.data == image).all()

    # Test putting the data into a GroupsHDU and round-tripping it
    ghdu = fits.GroupsHDU(data=gdata)
    ghdu.writeto(self.temp('test.fits'))

    with fits.open(self.temp('test.fits')) as h:
        hdr = h[0].header
        assert hdr['GCOUNT'] == 10
        assert hdr['PCOUNT'] == 2
        assert hdr['NAXIS'] == 5
        assert hdr['NAXIS1'] == 0
        assert hdr['NAXIS2'] == 5
        assert hdr['NAXIS3'] == 2
        assert hdr['NAXIS4'] == 1
        assert hdr['NAXIS5'] == 1
        assert h[0].data.parnames == ['abc', 'xyz']
        assert comparerecords(h[0].data, gdata)
def test_append_groupshdu_to_non_empty_list(self):
    """Tests that appending a GroupsHDU to a non-empty HDUList fails.

    A GroupsHDU can only be the primary (first) HDU, so appending one
    after an existing HDU must raise ``ValueError``.
    """
    hdul = fits.HDUList()
    hdu = fits.PrimaryHDU(np.arange(100, dtype=np.int32))
    hdul.append(hdu)
    hdu = fits.GroupsHDU()
    assert_raises(ValueError, hdul.append, hdu)
def test_insert_groupshdu_to_begin_of_hdulist_with_groupshdu(self):
    """
    Tests inserting a Simple GroupsHDU to the beginning of an HDUList
    that already contains a GroupsHDU.
    """
    hdul = fits.HDUList()
    hdu = fits.GroupsHDU()
    hdul.insert(0, hdu)
    # Only one GroupsHDU (the primary HDU) is permitted per file.
    assert_raises(ValueError, hdul.insert, 0, hdu)
def test_insert_groupshdu_to_empty_list(self):
    """Tests inserting a Simple GroupsHDU to an empty HDUList."""
    hdulist = fits.HDUList()
    ghdu = fits.GroupsHDU()
    hdulist.insert(0, ghdu)

    expected = [(0, 'PRIMARY', 'GroupsHDU', 8, (), '',
                 '1 Groups 0 Parameters')]
    assert hdulist.info(output=False) == expected

    # The same summary must survive a write/read round trip.
    hdulist.writeto(self.temp('test-insert.fits'))
    assert fits.info(self.temp('test-insert.fits'),
                     output=False) == expected
def test_groups_hdu_data(self):
    """Round-trip GroupData through a checksummed file on disk."""
    image = np.arange(100.0)
    image.shape = (10, 1, 1, 2, 5)
    par_float = np.arange(10) + 0.1
    par_const = 42
    gdata = fits.hdu.groups.GroupData(image,
                                      parnames=['abc', 'xyz'],
                                      pardata=[par_float, par_const],
                                      bitpix=-32)
    hdu = fits.GroupsHDU(gdata)
    hdu.writeto(self.temp('tmp.fits'), clobber=True, checksum=True)

    with fits.open(self.temp('tmp.fits'), checksum=True) as hdul:
        assert comparerecords(hdul[0].data, hdu.data)
        # Both checksum keywords must be present with the expected,
        # stable values.
        assert 'CHECKSUM' in hdul[0].header
        assert hdul[0].header['CHECKSUM'] == '3eDQAZDO4dDOAZDO'
        assert 'DATASUM' in hdul[0].header
        assert hdul[0].header['DATASUM'] == '2797758084'
def test_duplicate_parameter(self):
    """
    Tests support for multiple parameters of the same name, and
    ensures that the data in duplicate parameters are returned as a
    single summed value.
    """
    image = np.arange(100.0)
    image.shape = (10, 1, 1, 2, 5)
    par_a = np.arange(10, dtype=np.float32) + 1
    par_b = 42.0
    gd = fits.hdu.groups.GroupData(image,
                                   parnames=['abc', 'xyz', 'abc'],
                                   pardata=[par_a, par_b, par_a],
                                   bitpix=-32)

    assert gd.parnames == ['abc', 'xyz', 'abc']
    # Duplicated parameters come back summed on access by name.
    assert (gd.par('abc') == par_a * 2).all()
    assert gd[0].par('abc') == 2

    # Test setting a parameter
    gd[0].setpar(0, 2)
    assert gd[0].par('abc') == 3
    # Setting a duplicated name requires one value per duplicate.
    assert_raises(ValueError, gd[0].setpar, 'abc', 2)
    gd[0].setpar('abc', (2, 3))
    assert gd[0].par('abc') == 5
    assert gd.par('abc')[0] == 5
    assert (gd.par('abc')[1:] == par_a[1:] * 2).all()

    # Test round-trip
    ghdu = fits.GroupsHDU(data=gd)
    ghdu.writeto(self.temp('test.fits'))

    with fits.open(self.temp('test.fits')) as h:
        hdr = h[0].header
        assert hdr['PCOUNT'] == 3
        assert hdr['PTYPE1'] == 'abc'
        assert hdr['PTYPE2'] == 'xyz'
        assert hdr['PTYPE3'] == 'abc'
        assert gd.parnames == ['abc', 'xyz', 'abc']
        assert gd.dtype.names == ('abc', 'xyz', '_abc', 'DATA')
        assert gd.par('abc')[0] == 5
        assert (gd.par('abc')[1:] == par_a[1:] * 2).all()
def test_insert_groupshdu_to_non_empty_list(self):
    """Tests inserting a Simple GroupsHDU into a non-empty HDUList.

    A GroupsHDU must be the primary HDU: inserting it anywhere but
    index 0 raises ``ValueError``, while inserting it at the front
    succeeds and demotes the previous primary to an ImageHDU.
    """
    hdul = fits.HDUList()
    hdu = fits.PrimaryHDU(np.arange(100, dtype=np.int32))
    hdul.insert(0, hdu)

    hdu = fits.GroupsHDU()
    # Not allowed anywhere but the beginning of the list.
    assert_raises(ValueError, hdul.insert, 1, hdu)

    info = [(0, 'PRIMARY', 'GroupsHDU', 8, (), '',
             '1 Groups 0 Parameters'),
            (1, '', 'ImageHDU', 6, (100,), 'int32', '')]

    hdul.insert(0, hdu)
    assert hdul.info(output=False) == info

    hdul.writeto(self.temp('test-insert.fits'))
    assert fits.info(self.temp('test-insert.fits'), output=False) == info
def make_primary_big(self):  ### Time means: 1 minute, two minutes or more!!!!
    """
    Creates the primary header data unit (HDU).

    Builds a random-groups HDU from ``self.visibility_data`` with the
    group parameters UU/VV/WW/BASELINE/DATE/DATE/SOURCE/FREQSEL/INTTIM,
    then fills the header from the keyword tables below plus
    per-observation values taken from ``self.muser``.

    :return: instance of ``pf.GroupsHDU``
    """
    # Group parameter arrays, one entry per visibility group.
    # 'DATE' appears twice — presumably the two-part (high/low precision)
    # date convention used by UV-FITS readers; confirm against consumers.
    parameter_names = ['UU', 'VV', 'WW', 'BASELINE', 'DATE', 'DATE',
                       'SOURCE', 'FREQSEL', 'INTTIM']
    parameter_data = [
        np.asarray(list(self.datauu), dtype=np.float32),
        np.asarray(list(self.datavv), dtype=np.float32),
        np.asarray(list(self.dataww), dtype=np.float32),
        np.asarray(list(self.databaseline), dtype=np.float32),
        np.asarray(list(self.datadate1), dtype=np.float32),
        np.asarray(list(self.datadate2), dtype=np.float32),
        np.asarray(list(self.datasource), dtype=np.int32),
        np.asarray(list(self.datafreqsel), dtype=np.int32),
        1,  # INTTIM: a single constant value shared by every group
    ]

    groupData = pf.GroupData(self.visibility_data,
                             bitpix=-32,
                             parnames=parameter_names,
                             pardata=parameter_data)
    hdu = pf.GroupsHDU(groupData)

    # Mandatory primary-header keywords for a random-groups FITS file.
    primary = {
        'BITPIX': 8,
        'EXTEND': True,
        'NAXIS': 0,
        'PCOUNT': 0,
        'GROUPS': True,
        'GCOUNT': 0
    }
    # Default keyword values; several are overwritten below from the
    # current observation state.
    primaryTable = {
        'PZERO4': 0.0, 'PZERO5': 0.0, 'PZERO6': 0.0,
        'PZERO1': 0.0, 'PZERO2': 0.0, 'PZERO3': 0.0,
        'CDELT2': 0,
        'OBJECT': 'MUSER-1',
        'CDELT4': 25000000.0, 'CDELT5': 0.0, 'CDELT6': 0.0,
        'DATE-OBS': '2014-05-12T13:14:59.988771120',
        'OBSDEC': 18.113205304565984,
        'OBSRA': 3.266187801373725,
        'PSCAL5': 1.0, 'PSCAL4': 1.0, 'PSCAL6': 1.0,
        'PSCAL1': 1.0, 'PSCAL3': 1.0, 'PSCAL2': 1.0,
        'INSTRUME': 'MUSER',
        'CRVAL6': 0.0,
        'CDELT3': -1,
        'CRPIX6': 1.0,
        'EPOCH': 2000.0,
        'CRPIX4': 1.0, 'CRPIX5': 1.0,
        'CRVAL4': 400000000,
        'CRVAL5': 0.0, 'CRVAL2': 0.0, 'CRVAL3': -2,
        'CRPIX2': 1.0, 'CRPIX3': 1.0,
        'TELESCOP': 'MUSER',
        'CTYPE6': 'DEC', 'CTYPE5': 'RA', 'CTYPE4': 'FREQ',
        'CTYPE3': 'STOKES', 'CTYPE2': 'COMPLEX',
        'BSCALE': 1.0
    }

    # Per-observation overrides.
    primaryTable['OBSRA'] = self.muser.ra_sum
    primaryTable['OBSDEC'] = self.muser.dec_sum
    primaryTable['DATE-OBS'] = \
        self.muser.current_frame_time.get_fits_date_time()
    primaryTable['OBJECT'] = "MUSER-" + str(self.muser.sub_array)
    # Stokes axis reference value derived from the polarization setting.
    primaryTable['CRVAL3'] = self.muser.polarization - 2
    # Sub-array 2 uses a different reference frequency.
    if self.muser.sub_array == 2:
        primaryTable['CRVAL4'] = 2000000000

    for key in primary:
        hdu.header.set(key, primary[key])
    for key in primaryTable:
        hdu.header.set(key, primaryTable[key])

    hdu.verify()  # Will raise a warning if there's an issue
    return hdu
def make_primary(self):
    """
    Creates the primary header data unit (HDU).

    Builds a random-groups HDU whose groups hold (real, imag, weight)
    triples for every baseline and channel, with UU/VV/WW/BASELINE and
    two 'DATE' group parameters, then fills in the header keywords from
    the tables below plus per-observation values from ``self.muser``.

    :return: instance of ``pf.GroupsHDU``
    """
    # Number of baselines: antennas * (antennas - 1) / 2.  Floor
    # division keeps the value an int for the reshape/ndarray shape
    # arguments (and matches the '// 2' already used below).
    n_baselines = self.muser.antennas * (self.muser.antennas - 1) // 2

    # Placeholder data with the final shape of the DATA part:
    # (groups, 1, 1, channels, 1, real/imag/weight).
    imdata = np.arange(n_baselines * 16 * 3).reshape(
        n_baselines, 1, 1, 16, 1, 3)

    # Group parameters: u, v, w coordinates, baseline id and the two
    # 'DATE' parameters.
    pdata1 = [uvw[0] for uvw in self.muser.uvws_sum]
    pdata2 = [uvw[1] for uvw in self.muser.uvws_sum]
    pdata3 = [uvw[2] for uvw in self.muser.uvws_sum]
    pdata4 = self.muser.baseline
    pdata5 = self.muser.obs_date_sum
    pdata6 = self.muser.obs_time_sum
    groupData = pf.GroupData(
        imdata,
        bitpix=-32,
        parnames=['UU', 'VV', 'WW', 'BASELINE', 'DATE', 'DATE'],
        pardata=[pdata1, pdata2, pdata3, pdata4, pdata5, pdata6])
    hdu = pf.GroupsHDU(groupData)

    # Assemble the visibility cube: one (real, imag, weight) triple per
    # baseline and channel; the weight is fixed at 1.0.
    VISDATA = np.ndarray(shape=(n_baselines, 1, 1, 16, 1, 3),
                         dtype=float)
    i = 0
    for antenna1 in range(0, self.muser.antennas - 1):
        for antenna2 in range(antenna1 + 1, self.muser.antennas):
            for channel in range(0, 16):
                VISDATA[i][0][0][channel][0] = [
                    self.muser.baseline_data[i][channel].real,
                    self.muser.baseline_data[i][channel].imag,
                    1.0
                ]
            i += 1

    # Copy the assembled visibilities into the HDU's DATA section.
    for i in range(0, n_baselines):
        hdu.data.data[i] = VISDATA[i]

    # Mandatory primary-header keywords for a random-groups FITS file.
    primary = {
        'BITPIX': 8,
        'EXTEND': True,
        'NAXIS': 0,
        'PCOUNT': 0,
        'GROUPS': True,
        'GCOUNT': 0
    }
    # Default keyword values; several are overwritten below from the
    # current observation state.
    primaryTable = {
        'PZERO4': 0.0, 'PZERO5': 0.0, 'PZERO6': 0.0,
        'PZERO1': 0.0, 'PZERO2': 0.0, 'PZERO3': 0.0,
        'CDELT2': 0,
        'OBJECT': 'MUSER-1',
        'CDELT4': 25000000.0, 'CDELT5': 0.0, 'CDELT6': 0.0,
        'DATE-OBS': '2014-05-12T13:14:59.988771120',
        'STRT-OBS': '2014-05-12T13:14:59.988771120',
        'END-OBS': '2014-05-12T13:14:59.988771120',
        'OBSDEC': 18.113205304565984,
        'OBSRA': 3.266187801373725,
        'PSCAL5': 1.0, 'PSCAL4': 1.0, 'PSCAL6': 1.0,
        'PSCAL1': 1.0, 'PSCAL3': 1.0, 'PSCAL2': 1.0,
        'INSTRUME': 'MUSER',
        'CRVAL6': 0.0,
        'CDELT3': -1,
        'CRPIX6': 1.0,
        'EPOCH': 2000.0,
        'CRPIX4': 1.0, 'CRPIX5': 1.0,
        'CRVAL4': 400000000,
        'CRVAL5': 0.0, 'CRVAL2': 0.0, 'CRVAL3': -2,
        'CRPIX2': 1.0, 'CRPIX3': 1.0,
        'TELESCOP': 'MUSER',
        'CTYPE6': 'DEC', 'CTYPE5': 'RA', 'CTYPE4': 'FREQ',
        'CTYPE3': 'STOKES', 'CTYPE2': 'COMPLEX',
        'BSCALE': 1.0
    }

    # Per-observation overrides.
    primaryTable['OBSRA'] = self.muser.ra_sum
    primaryTable['OBSDEC'] = self.muser.dec_sum
    primaryTable['DATE-OBS'] = \
        self.muser.current_frame_time.get_fits_date_time()
    primaryTable['STRT-OBS'] = \
        self.muser.start_date_time_fits.get_fits_date_time()
    primaryTable['END-OBS'] = \
        self.muser.end_date_time_fits.get_fits_date_time()
    primaryTable['OBJECT'] = "MUSER-" + str(self.muser.sub_array)
    # Stokes axis reference value derived from the polarization setting.
    primaryTable['CRVAL3'] = self.muser.polarization - 2
    # Sub-array 2 uses a different reference frequency.
    if self.muser.sub_array == 2:
        primaryTable['CRVAL4'] = 2000000000

    for key in primary:
        hdu.header.set(key, primary[key])
    for key in primaryTable:
        hdu.header.set(key, primaryTable[key])

    hdu.verify()  # Will raise a warning if there's an issue
    return hdu
def _data_to_HDU(self, _data, header):
    """
    Method that converts structured numpy.ndarray with data and instance
    of ``PyFits.Header`` class to the instance of ``PyFits.GroupsHDU``
    class.

    :param _data:
        Numpy.ndarray with dtype = [('uvw', '<f8', (3,)),
        ('time', '<f8'), ('baseline', 'int'),
        ('hands', 'complex', (nif, nstokes,)),
        ('weights', '<f8', (nif, nstokes,))]

    :param header:
        Instance of ``PyFits.Header`` class

    :return:
        Instance of ``PyFits.GroupsHDU`` class
    """
    # Constructing array (3, N, #stokes, #if,)
    temp = np.vstack((_data['hands'].real[np.newaxis, :],
                      _data['hands'].imag[np.newaxis, :],
                      _data['weights'][np.newaxis, :]))

    # FIXME: PZEROi has different i for same key in different FITS-files!
    # Construct corresponding arrays of parameter values.  Apply the
    # header's PZERO/PSCAL transform to uvw, time and baseline so they
    # can be matched against the stored group parameters below.
    _data_copy = _data.copy()
    _data_copy['uvw'][:, 0] = (_data_copy['uvw'][:, 0] +
                               self.hdu.header['PZERO1']) * \
        self.hdu.header['PSCAL1']
    _data_copy['uvw'][:, 1] = (_data_copy['uvw'][:, 1] +
                               self.hdu.header['PZERO2']) * \
        self.hdu.header['PSCAL2']
    _data_copy['uvw'][:, 2] = (_data_copy['uvw'][:, 2] +
                               self.hdu.header['PZERO3']) * \
        self.hdu.header['PSCAL3']
    _data_copy['time'] = (_data_copy['time'] +
                          self.hdu.header['PZERO4']) * \
        self.hdu.header['PSCAL4']
    _data_copy['baseline'] = (_data_copy['baseline'] +
                              self.hdu.header['PZERO6']) * \
        self.hdu.header['PSCAL6']

    # Now roll axis 0 (real,imag,weight) to 3rd position
    # (3, N, #if, #stokes) => (N, #if, #stokes, 3)
    temp = np.rollaxis(temp, 0, 4)

    # First, add dimensions:
    for i in range(self.ndim_ones):
        temp = np.expand_dims(temp, axis=4)
    # Now temp has shape (N, #if, #stokes, 3, 1, 1, 1)

    # Change dimensions to pyfits.hdu.data['DATA'] dimensions
    temp = change_shape(temp, self.data_of__data,
                        {key: self.data_of_data[key][0]
                         for key in self.data_of_data.keys()})
    # => (N, 1, 1, #if, 1, #stokes, 3) as in 'DATA' part of pyfits
    # recarray

    # Write regular array data (``temp``) and corresponding parameters
    # to instances of pyfits.GroupsHDU
    imdata = temp

    # Use parameter values of saving data to find indexes of this
    # parameters in the original data entry of HDU
    if len(_data) < len(self.hdu.data):
        # Saving a subset of the groups: locate each outgoing record in
        # the original HDU by matching (u, v, w, date, baseline) sums.
        original_data = _to_one_ndarray(self.hdu.data, 'UU---SIN',
                                        'VV---SIN', 'WW---SIN', 'DATE',
                                        'BASELINE')
        saving_data = np.dstack((np.array(np.hsplit(_data_copy['uvw'],
                                                    3)).T,
                                 _data_copy['time'],
                                 _data_copy['baseline']))
        saving_data = np.squeeze(saving_data)
        # TODO: this is funnest workaround:)
        # NOTE(review): matching on row sums can collide if two rows
        # share the same sum — presumably unlikely in practice; confirm.
        par_indxs = np.hstack(index_of(saving_data.sum(axis=1),
                                       original_data.sum(axis=1)))
    elif len(_data) > len(self.hdu.data):
        raise Exception('There must be equal or less visibilities to\
 save!')
    else:
        print "Saving uvdata - number of groups hasn't changed..."
        par_indxs = np.arange(len(self.hdu.data))

    # Re-derive raw (unscaled) parameter values for every stored
    # parameter by inverting the PZERO/PSCAL transform.
    parnames = self.hdu.data.parnames
    pardata = list()
    for name in parnames:
        par = self.hdu.data[name][par_indxs]
        par = (par - self.hdu.header['PZERO' +
                                     str(self.par_dict[name])]) / \
            self.hdu.header['PSCAL' + str(self.par_dict[name])]
        pardata.append(par)

    # If two parameters for one value (like ``DATE``)
    for name in parnames:
        if parnames.count(name) == 2:
            indx_to_zero = parnames.index(name) + 1
            break
    # then zero array for second parameter with the same name
    # NOTE(review): ``indx_to_zero`` is only bound when some parameter
    # name occurs exactly twice; if none does, the next line raises
    # NameError.  Presumably a duplicated 'DATE' pair is always present
    # in these files — confirm.
    # TODO: use dtype from ``BITPIX`` keyword
    pardata[indx_to_zero] = np.zeros(len(par_indxs), dtype=float)

    a = pf.GroupData(imdata, parnames=parnames, pardata=pardata,
                     bitpix=-32)
    b = pf.GroupsHDU(a)
    # PyFits updates header using given data (``GCOUNT``)
    b.header = self.hdu.header
    return b