def load_sources(filename): """ Open a file, read contents, return a list of all the sources in that file. Parameters ---------- filename : str Filename to be read Return ------ catalog : [`class:AegeanTools.models.ComponentSource`, ...] A list of source components """ table = catalogs.load_table(filename) required_cols = ['ra', 'dec', 'peak_flux', 'a', 'b', 'pa'] good = True for c in required_cols: if c not in table.colnames: logging.error("Column {0} not found".format(c)) good = False if not good: logging.error("Some required columns missing") return None catalog = catalogs.table_to_source_list(table) logging.info("read {0} sources from {1}".format(len(catalog), filename)) return catalog
def load_sources(filename): """ Open a file, read contents, return a list of all the sources in that file. @param filename: @return: list of OutputSource objects """ catalog = catalogs.table_to_source_list(catalogs.load_table(filename)) logging.info("read {0} sources from {1}".format(len(catalog), filename)) return catalog
def test_load_sources_missing_columns():
    """Test that load_sources returns None when a required column is missing"""
    filename = 'tests/test_files/1904_comp.fits'
    table = catalogs.load_table(filename)
    # renaming 'ra' means the required column set is no longer present
    table.rename_column('ra', 'RAJ2000')
    table.write('dlme.fits')
    cat = ar.load_sources('dlme.fits')
    if os.path.exists('dlme.fits'):
        os.remove('dlme.fits')
    if cat is not None:
        raise AssertionError("Missing columns should be caught, but weren't")
    return
def test_load_table_write_table():
    catalog = [OutputSource()]
    for fmt in ['csv', 'vo']:
        fout = 'a.'+fmt
        cat.save_catalog(fout, catalog, meta=None)
        fout = 'a_comp.'+fmt
        tab = cat.load_table(fout)
        if not len(tab) == len(catalog):
            raise AssertionError()

    os.remove(fout)

    cat.save_catalog('a.csv', catalog, meta=None)
    tab = cat.load_table('a_comp.csv')
    cat.write_table(tab, 'a.csv')
    if not os.path.exists('a.csv'):
        raise AssertionError()
    os.remove('a.csv')
    assert_raises(Exception, cat.write_table, tab, 'bla.fox')
    assert_raises(Exception, cat.load_table, 'file.fox')
def test_load_table_write_table(): """Test that we can write and load tables with various file formats""" catalog = [OutputSource()] for fmt in ['csv', 'vo']: fout = 'a.'+fmt cat.save_catalog(fout, catalog, meta=None) fout = 'a_comp.'+fmt tab = cat.load_table(fout) if not len(tab) == len(catalog): raise AssertionError() # by keeping this out of the loop, we make use of the internal remove function os.remove(fout) cat.save_catalog('a.csv', catalog, meta=None) tab = cat.load_table('a_comp.csv') cat.write_table(tab, 'a.csv') if not os.path.exists('a.csv'): raise AssertionError() os.remove('a.csv') assert_raises(Exception, cat.write_table, tab, 'bla.fox') assert_raises(Exception, cat.load_table, 'file.fox')
def test_load_table_write_table(): """Test that we can write and load tables with various file formats""" catalog = [ComponentSource()] for fmt in ['csv', 'vo']: fout = 'a.' + fmt cat.save_catalog(fout, catalog, meta=None) fout = 'a_comp.' + fmt tab = cat.load_table(fout) if not len(tab) == len(catalog): raise AssertionError() # by keeping this out of the loop, we make use of the internal remove function os.remove(fout) cat.save_catalog('a.csv', catalog, meta=None) tab = cat.load_table('a_comp.csv') cat.write_table(tab, 'a.csv') if not os.path.exists('a.csv'): raise AssertionError() os.remove('a.csv') assert_raises(Exception, cat.write_table, tab, 'bla.fox') assert_raises(Exception, cat.load_table, 'file.fox')
def load_sources(filename, ra_col='ra', dec_col='dec', peak_col='peak_flux',
                 a_col='a', b_col='b', pa_col='pa'):
    """
    Open a file, read contents, return a list of all the sources in that file.

    Parameters
    ----------
    filename : str
        Filename to be read

    ra_col, dec_col, peak_col, a_col, b_col, pa_col : str
        The column names for each of the parameters.
        Default = ['ra', 'dec', 'peak_flux', 'a', 'b', 'pa']

    Return
    ------
    catalog : [`class:AegeanTools.models.ComponentSource`, ...]
        A list of source components
    """
    table = catalogs.load_table(filename)
    required_cols = [ra_col, dec_col, peak_col, a_col, b_col, pa_col]
    # required_cols = ['ra', 'dec', 'peak_flux', 'a', 'b', 'pa']
    good = True
    for c in required_cols:
        if c not in table.colnames:
            logging.error("Column {0} not found".format(c))
            good = False
    if not good:
        logging.error("Some required columns missing or mis-labeled")
        return None
    # rename the table columns
    for old, new in zip([ra_col, dec_col, peak_col, a_col, b_col, pa_col],
                        ['ra', 'dec', 'peak_flux', 'a', 'b', 'pa']):
        table.rename_column(old, new)
    catalog = catalogs.table_to_source_list(table)
    logging.info("read {0} sources from {1}".format(len(catalog), filename))
    return catalog
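A hedged example of the column-mapping variant above: a catalogue that labels its positions RAJ2000/DEJ2000 (assumed labels; only RAJ2000 appears in the renamed test table earlier) can be remapped onto the names Aegean expects. The file name is a placeholder.

# 'catalogue.vot' and the DEJ2000 label are assumptions for illustration only.
sources = load_sources('catalogue.vot',
                       ra_col='RAJ2000', dec_col='DEJ2000',
                       peak_col='peak_flux', a_col='a', b_col='b', pa_col='pa')
if sources is None:
    logging.error("column names did not match the catalogue")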
def test_write_fits_table_variable_uuid_lengths():
    """Test that the length of the UUID column is appropriate"""
    catalog = []
    for l in range(10):
        c = ComponentSource()
        c.ra_str = c.dec_str = "hello!"
        c.uuid = 'source-{0:d}'.format(2**l)
        catalog.append(c)
    cat.save_catalog('a.fits', catalog, meta={'Purpose': 'Testing'})
    if not os.path.exists('a_comp.fits'):
        raise AssertionError()
    rcat = cat.load_table('a_comp.fits')
    for src1, src2 in zip(rcat, catalog):
        if len(src1['uuid']) != len(src2.uuid):
            print("len mismatch for source {0}".format(src1))
            print("uuid should be len={0}".format(len(src2.uuid)))
            raise AssertionError("UUID col is of wrong length")
    os.remove('a_comp.fits')
    return
save_append = '_src_table_comp.vot'

# Create a string that stores the location of the Stokes Q image
Sto_Q_file = data_loc + 'Sto_Q_' + mosaic_area + '.fits'

# Create a string that stores the location of the Stokes U image
Sto_U_file = data_loc + 'Sto_U_' + mosaic_area + '.fits'

# Create a string that stores the location of the Stokes Q source table.
Sto_Q_table_loc = data_loc + 'Sto_Q_' + mosaic_area + save_append

# Create a string that stores the location of the Stokes U source table.
Sto_U_table_loc = data_loc + 'Sto_U_' + mosaic_area + save_append

# Obtain a source list for Stokes Q, from the catalogue
Q_srclist = catalogs.table_to_source_list(catalogs.load_table(Sto_Q_table_loc))

# Obtain a source list for Stokes U, from the catalogue
U_srclist = catalogs.table_to_source_list(catalogs.load_table(Sto_U_table_loc))

# Open the Stokes Q FITS file, and extract its data and header
Q_hdulist = fits.open(Sto_Q_file)
Q_data = Q_hdulist[0].data
Q_hdr = Q_hdulist[0].header

# Create two empty matrices, one to hold the first mask, and the other to hold
# the second mask
first_mask = np.zeros(np.shape(Q_data), dtype=bool)
second_mask = np.zeros(np.shape(Q_data), dtype=bool)

# Run the generate mask function on Stokes Q, to create a mask that covers
def load(filename):
    """Load a table from the given file and return it."""
    print("load", filename)
    table = catalogs.load_table(filename)
    return table