Example #1
def load_table(filename):
    """
    Load a table from a given file.

    Supports csv, tab, tex, vo, vot, xml, fits, and hdf5.

    Parameters
    ----------
    filename : str
        File to read

    Returns
    -------
    table : Table
        Table of data.
    """
    supported = get_table_formats()

    fmt = os.path.splitext(filename)[-1][1:].lower()  # extension sans '.'

    if fmt in ['csv', 'tab', 'tex'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = ascii.read(filename)
    elif fmt in ['vo', 'vot', 'xml', 'fits', 'hdf5'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = Table.read(filename)
    else:
        log.error("Table format not recognized or supported")
        log.error("{0} [{1}]".format(filename, fmt))
        raise Exception("Table format not recognized or supported")
    return t
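load_table sends plain-text formats to astropy.io.ascii and everything else to the unified Table.read interface. Below is a minimal, self-contained sketch of that same extension-based dispatch, assuming only astropy is installed; the get_table_formats whitelist and the logging from the example are omitted.

import os

from astropy.io import ascii
from astropy.table import Table


def read_any(filename):
    """Dispatch on the file extension, as load_table does above (sketch only)."""
    ext = os.path.splitext(filename)[-1][1:].lower()  # extension sans '.'
    if ext in ('csv', 'tab', 'tex'):
        return ascii.read(filename)  # plain-text formats via astropy.io.ascii
    return Table.read(filename)      # fits, vot, hdf5, ... via the unified reader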
Example #2
def load_table(filename):
    """
    Load a table from a given file.

    Supports csv, tab, tex, vo, vot, xml, fits, and hdf5.

    Parameters
    ----------
    filename : str
        File to read

    Returns
    -------
    table : Table
        Table of data.
    """
    supported = get_table_formats()

    fmt = os.path.splitext(filename)[-1][1:].lower()  # extension sans '.'

    if fmt in ['csv', 'tab', 'tex'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = ascii.read(filename)
    elif fmt in ['vo', 'vot', 'xml', 'fits', 'hdf5'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = Table.read(filename)
    else:
        log.error("Table format not recognized or supported")
        log.error("{0} [{1}]".format(filename, fmt))
        raise Exception("Table format not recognized or supported")
    return t
Example #3
def main2():
    """
    As per main() but we operate on columns instead of rows. So much faster!
    """
    global colnames

    if len(sys.argv) != 3:
        print "Usage ", __file__, " inputcatalog outputcatalog"
        sys.exit()

    infile = sys.argv[-2]
    outfile = sys.argv[-1]

    print('read')
    master = Table.read(infile)
    print('colnames')
    colnames = get_colnames(master)
    print('filtering')
    # rows where the first column already holds the source we want need no change
    second = np.where(master['local_rms_2'] <= master['local_rms_1'])
    master = shuffle_left_rows(master, second, 2)

    print("strip cols")
    master = strip_cols(master)
    print('write')
    if os.path.exists(outfile):
        os.remove(outfile)
    master.write(outfile)
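A small, self-contained illustration of the np.where row selection used above, with a toy table that reuses the same column names (the values are made up; shuffle_left_rows and strip_cols are project helpers and are not reproduced here):

import numpy as np
from astropy.table import Table

master = Table({'local_rms_1': [1.0, 3.0, 2.0],
                'local_rms_2': [2.0, 1.0, 2.0]})
# indices of rows where the second measurement is at least as good as the first
second = np.where(master['local_rms_2'] <= master['local_rms_1'])
print(second[0])  # -> [1 2]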
Example #4
def fits_to_packets(file):
    logger.info(f'Processing fits file {file}')
    parser = StixTCTMParser()
    control = Table.read(str(file), hdu=1)
    data = Table.read(str(file), hdu=2)
    binary_packets = [
        ConstBitArray(hex=hex).tobytes() for hex in data['data']
    ][-2:]
    packets = [parser.parse_binary(bd)[0] for bd in binary_packets][-2:]
    if np.abs([((len(data['data'][i]) // 2) - (control['data_len'][i] + 7))
               for i in range(len(data))]).sum() > 0:
        raise ValueError('Packet size and expected length do not match')
    # packets = list(chain.from_iterable(packets))
    # Filter keeping only TM packets
    # packets = list(filter(lambda x: x['header']['TMTC'] == 'TM', packets))
    # Packet ordering is not guaranteed so sort by coarse time then seq count
    # packets.sort(key=lambda x: (x['header']['coarse_time'], x['header']['seq_count']))
    return packets
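A minimal sketch of the Table.read(..., hdu=...) pattern this function relies on: each FITS extension of a (hypothetical) packets file is read into its own astropy Table. The file name and the column meanings are placeholders; StixTCTMParser and ConstBitArray come from the STIX tooling and are not shown.

from astropy.table import Table

control = Table.read("packets.fits", hdu=1)  # extension 1: per-packet metadata
data = Table.read("packets.fits", hdu=2)     # extension 2: hex-encoded payloads
print(control.colnames, len(data))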
Example #5
def load_table(filename):
    """

    :param filename:
    :return:
    """
    supported = get_table_formats()

    fmt = os.path.splitext(filename)[-1][1:].lower()  # extension sans '.'

    if fmt in ['csv', 'tab', 'tex'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = ascii.read(filename)
    elif fmt in ['vo', 'vot', 'xml', 'fits', 'hdf5'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = Table.read(filename)
    else:
        log.error("Table format not recognized or supported")
        log.error("{0} [{1}]".format(filename,fmt))
        t= None
    return t
Example #6
def load_table(filename):
    """

    :param filename:
    :return:
    """
    supported = get_table_formats()

    fmt = os.path.splitext(filename)[-1][1:].lower()  #extension sans '.'

    if fmt in ['csv', 'tab', 'tex'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = ascii.read(filename)
    elif fmt in ['vo', 'vot', 'xml', 'fits', 'hdf5'] and fmt in supported:
        log.info("Reading file {0}".format(filename))
        t = Table.read(filename)
    else:
        log.error("Table format not recognized or supported")
        log.error("{0} [{1}]".format(filename, fmt))
        t = None
    return t
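Unlike Example #1, this variant returns None instead of raising when the format is unsupported, so the caller has to check the result. A hedged usage sketch (the file name is a placeholder):

t = load_table("catalog.fits")
if t is None:
    raise ValueError("catalog.fits could not be read")
print(len(t), "rows,", len(t.colnames), "columns")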
Example #7
    def load(self, catalogue_file, **kwargs):
        """ load the given file and create the object.
        
        kwargs can have any build option like key_ra, key_mag etc.
        """
        # ---------------------
        # - Parsing the input
        if catalogue_file.endswith(".fits"):
            # loading from fits file
            fits = pf.open(catalogue_file)
            header = fits[self._build_properties["data_index"]].header
            data = fits[self._build_properties["data_index"]].data
            if type(data) == pf.fitsrec.FITS_rec:
                from astrobject.utils.tools import fitsrec_to_dict
                data = TableColumns(fitsrec_to_dict(data))

        elif catalogue_file.endswith(".pkl"):
            # loading from pkl
            fits = None
            header = None
            data = load_pkl(catalogue_file)
            if not type(data) is Table:
                try:
                    data = Table(data)
                except:
                    warnings.warn(
                        "Convertion of 'data' into astropy Table failed")
        else:
            fits = None
            header = None
            format_ = kwargs.pop("format", "ascii")
            data = Table.read(catalogue_file, format=format_, **kwargs)

        # ---------------------
        # - Calling Creates
        self.create(data, header, **kwargs)
        self._properties["filename"] = catalogue_file
        self._derived_properties["fits"] = fits
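A minimal sketch of the FITS branch above using only astropy: open the catalogue, pick one extension, and wrap its FITS_rec data in a Table. The file name and extension index are placeholders; pf is astropy.io.fits, as in the example.

from astropy.io import fits as pf
from astropy.table import Table

with pf.open("catalogue.fits") as hdul:
    header = hdul[1].header
    data = Table(hdul[1].data)  # Table() accepts a FITS_rec directly
print(data.colnames)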
Example #8
    def load(self,catalogue_file,**kwargs):
        """ load the given file and create the object.

        kwargs can have any build option like key_ra, key_mag etc.
        """
        # ---------------------
        # - Parsing the input
        if catalogue_file.endswith(".fits"):
            # loading from fits file
            fits   = pf.open(catalogue_file)
            header = fits[self._build_properties["data_index"]].header
            data   = fits[self._build_properties["data_index"]].data
            if type(data) == pf.fitsrec.FITS_rec:
                from astrobject.utils.tools import fitsrec_to_dict
                data = TableColumns(fitsrec_to_dict(data))

        elif catalogue_file.endswith(".pkl"):
            # loading from pkl
            fits = None
            header = None
            data = load_pkl(catalogue_file)
            if not type(data) is Table:
                try:
                    data = Table(data)
                except:
                    warnings.warn("Conversion of 'data' into astropy Table failed")
        else:
            fits   = None
            header = None
            format_ = kwargs.pop("format","ascii")
            data   = Table.read(catalogue_file,format=format_,**kwargs)

        # ---------------------
        # - Calling Creates
        self.create(data, header, **kwargs)
        self._properties["filename"] = catalogue_file
        self._derived_properties["fits"] = fits
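The .pkl branch simply wraps whatever load_pkl returns in a Table. A small sketch of that conversion with a stand-in dict (load_pkl itself is a project helper and is not shown):

from astropy.table import Table

raw = {"ra": [10.1, 10.2], "dec": [-30.0, -30.1]}  # stand-in for load_pkl(...)
data = raw if isinstance(raw, Table) else Table(raw)
print(data.colnames)  # -> ['ra', 'dec']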
Example #9
166
174
181
189
197
204
212
220
227
""".split()

killring = ['Name', 'island_deep', 'source_deep', 'uuid_deep', 'flags_deep']
killring.extend(['uuid_{0}'.format(f) for f in mids])

print "table load"
tab = Table.read(input)  # "all_wide.fits")
# remove columns we don't want
print "killing columns"
for n in killring:
    if n in tab.colnames:
        del tab[n]

print "renaming deep -> wide"
for n in tab.colnames:
    if 'deep' in n:
        tab[n].name = n.replace('deep', 'wide')

print "making names"
# make a new column which is the IAU source name
c = Table.Column([
    "GLEAM J" +
Example #10
    ul,
)

from astropy.io import fits
from astropy.table.table import Table

from stixcore.data.test import test_data
from stixcore.idb.manager import IDBManager
from stixcore.products.level0.scienceL0 import ScienceProduct
from stixcore.products.product import Product, read_qtable

name_counter = defaultdict(int)

IDB = IDBManager(test_data.idb.DIR).get_idb("2.26.34")
descriptions = Table.read("stixcore/data/test/ddpd.in.csv",
                          format='csv',
                          delimiter="\t")


def mtable(columns, data, tclass):
    t = table(cls=tclass, style="width: 100%; table-layout:fixed;")
    _tr = tr()
    for c, style in columns:
        _tr.add(th(c, style=style))
    t.add(thead(_tr))
    tb = tbody()
    for r in data:
        _tr = tr()
        for i, c in enumerate(r):
            _tr.add(td(c, style=columns[i][1]))
        tb.add(_tr)