Example 1
def addcol(msname='$MS',colname=None,shape=None,
           data_desc_type='array',valuetype=None,init_with=0,**kw):
    """ add column to MS 
        msanme : MS to add colmn to
        colname : column name
        shape : shape
        valuetype : data type 
        data_desc_type : 'scalar' for scalar elements and array for 'array' elements
        init_with : value to initialise the column with 
    """
    msname, colname = interpolate_locals('msname colname')
    tab = pyrap.tables.table(msname,readonly=False)

    try: 
        tab.getcol(colname)
        info('Column already exists')

    except RuntimeError:
        info('Attempting to add %s column to %s'%(colname,msname))
        from pyrap.tables import maketabdesc
        valuetype = valuetype or 'complex'

        if shape is None: 
            dshape = list(tab.getcol('DATA').shape)
            shape = dshape[1:]

        if data_desc_type=='array':
            from pyrap.tables import makearrcoldesc
            coldmi = tab.getdminfo('DATA') # God forbid this (or the TIME) column doesn't exist
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makearrcoldesc(colname,init_with,shape=shape,valuetype=valuetype)),coldmi)

        elif data_desc_type=='scalar':
            from pyrap.tables import makescacoldesc
            coldmi = tab.getdminfo('TIME')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makescacoldesc(colname,init_with,valuetype=valuetype)),coldmi)

        info('Column added successfully.')

        if init_with:
            nrows = tab.nrows()
            dshape = [nrows] + list(shape)

            rowchunk = nrows//10 if nrows > 1000 else nrows
            for row0 in range(0,nrows,rowchunk):
                nr = min(rowchunk,nrows-row0)
                dshape[0] = nr
                tab.putcol(colname,numpy.ones(dshape,dtype=valuetype)*init_with,row0,nr)

    tab.close()
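For illustration, a hypothetical call of the helper above might look like the sketch below; the MS path and column names are made up, and the function assumes a Pyxis-style environment that provides interpolate_locals(), info(), numpy and pyrap.tables.

# Hypothetical usage sketch (not part of the original example).
addcol(msname='observation.ms', colname='CORRECTED_DATA',
       valuetype='complex', init_with=1)      # array column, written in row chunks
addcol(msname='observation.ms', colname='ROW_FLAG',
       data_desc_type='scalar', valuetype='bool')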
Example 2
def main(options):
    inms = options.inms
    if inms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    outcol = options.outcol

    t = pt.table(inms, readonly=False, ack=True)

    if outcol not in t.colnames():
        print('Adding the output column', outcol, 'to', inms)
        coldmi = t.getdminfo('DATA')
        coldmi['NAME'] = outcol
        t.addcols(
            pt.maketabdesc(
                pt.makearrcoldesc(outcol,
                                  0.,
                                  valuetype='complex',
                                  shape=numpy.array(t.getcell('DATA',
                                                              0)).shape)),
            coldmi)
        data = t.getcol('DATA')
        t.putcol(outcol, data)
    else:
        print(outcol, 'column already exists')
Example 3
def test_fixed_column_shapes(tmp_path, column, dtype):
    """ Fixed column, shape and ndim must be supplied """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {'desc': {'_c_order': True,
                     'comment': '%s column' % column,
                     'dataManagerGroup': '',
                     'dataManagerType': '',
                     'keywords': {},
                     'ndim': 2,
                     'shape': (20, 30),
                     'maxlen': 0,
                     'option': 0,
                     'valueType': casa_type},
            'name': column}

    table_desc = pt.maketabdesc([desc])
    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, nrow=10, ack=False) as T:
        # Put 10 rows into the table
        T.putcol(column, np.zeros((10, 20, 30), dtype=dtype))
        assert T.getcol(column).shape == (10, 20, 30)

        # Must be ndim == 2
        err_str = "Table array conformance error"
        with pytest.raises(RuntimeError, match=err_str):
            T.putcol(column, np.zeros((5, 40), dtype=dtype))

        # shape != (20, 30)
        with pytest.raises(RuntimeError, match=err_str):
            T.putcol(column, np.zeros((5, 40, 30), dtype=dtype),
                     startrow=0, nrow=5)
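For comparison, a roughly equivalent fixed-shape descriptor can be built with the makearrcoldesc helper used elsewhere on this page; this is a sketch only, with a placeholder column name and value type, and exact descriptor defaults may differ slightly.

import pyrap.tables as pt

# Fixed ndim and shape: every cell must be a (20, 30) array.
desc = pt.makearrcoldesc("BAZ", 0.0, ndim=2, shape=[20, 30], valuetype="double")
table_desc = pt.maketabdesc([desc])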
Example 4
def main(options):

        cI = complex(0., 1.)

        inms = options.inms
        if inms == '':
                print('Error: you have to specify an input MS, use -h for help')
                return
        column = options.column
        outcol = options.outcol

        t = pt.table(inms, readonly=False, ack=True)
        if outcol not in t.colnames():
                print('Adding output column', outcol, 'to', inms)
                coldmi = t.getdminfo(column)
                coldmi['NAME'] = outcol
                t.addcols(pt.maketabdesc(pt.makearrcoldesc(outcol, 0., valuetype='complex', shape=numpy.array(t.getcell(column,0)).shape)), coldmi)
        print('Reading input column')
        data = t.getcol(column)
        print('Computing output column')
        outdata = numpy.transpose(numpy.array([
                data[:,:,0]-cI*data[:,:,1]+cI*data[:,:,2]+data[:,:,3],
                data[:,:,0]+cI*data[:,:,1]+cI*data[:,:,2]-data[:,:,3],
                data[:,:,0]-cI*data[:,:,1]-cI*data[:,:,2]-data[:,:,3],
                data[:,:,0]+cI*data[:,:,1]-cI*data[:,:,2]+data[:,:,3]]),
                (1,2,0))
        print('Finishing up')
        t.putcol(outcol, outdata)
Example 5
def test_only_row_shape(tmp_path, column, dtype):
    """ Missing ndim implies row only! """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {'desc': {'_c_order': True,
                     'comment': '%s column' % column,
                     'dataManagerGroup': '',
                     'dataManagerType': '',
                     'keywords': {},
                     'maxlen': 0,
                     'option': 0,
                     'valueType': casa_type},
            'name': column}

    table_desc = pt.maketabdesc([desc])
    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, nrow=10, ack=False) as T:
        # Put 10 rows into the table
        T.putcol(column, np.zeros(10, dtype=dtype))
        assert T.getcol(column).shape == (10,)

        # A 2-D array can't go into a scalar (row-only) column
        err_str = 'Vector<T>: ndim of other array > 1 ndim 1 differs from 2'
        with pytest.raises(RuntimeError, match=err_str):
            T.putcol(column, np.zeros((5, 40), dtype=dtype))

        # Neither can a 3-D array
        err_str = 'Vector<T>: ndim of other array > 1 ndim 1 differs from 3'
        with pytest.raises(RuntimeError, match=err_str):
            T.putcol(column, np.zeros((5, 40, 30), dtype=dtype),
                     startrow=0, nrow=5)
Example 6
def test_scalar_ndim(tmp_path, column, dtype):
    """ ndim set to zero implies scalars """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {
        'desc': {
            '_c_order': True,
            'comment': f'{column} column',
            'dataManagerGroup': '',
            'dataManagerType': '',
            'keywords': {},
            'maxlen': 0,
            'ndim': 0,
            'option': 0,
            'valueType': casa_type
        },
        'name': column
    }

    table_desc = pt.maketabdesc([desc])
    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, nrow=10, ack=False) as T:
        for r in range(10):
            T.putcell(column, r, r)

        for r in range(10):
            assert T.getcell(column, r) == r
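A scalar column like this can also be described directly with makescacoldesc (as in Example 10); a minimal sketch with a placeholder column name:

import pyrap.tables as pt

# The value argument (0 here) only fixes the column's data type.
table_desc = pt.maketabdesc([pt.makescacoldesc("BAZ", 0)])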
Example 7
    def addRefColumnToTesttab(self, columnname):
        if self.verbose:
            print(bcolors.OKBLUE + "Forwarding reference column " +
                  bcolors.WARNING + columnname + bcolors.OKBLUE + " to " +
                  self.test_MS + bcolors.ENDC)  # DEBUG

        testtab = pt.table(self.test_MS,
                           readonly=False)  # Open test_MS in read/write mode
        reftab = pt.table(self.MS)  # Open reference MS in readonly mode

        # Get column description from reftab
        testcolumnname = "test_" + columnname

        testtab.renamecol(
            columnname,
            testcolumnname)  # rename the existing column in the test table
        refcol_desc = reftab.getcoldesc(columnname)

        # Use ForwardColumnEngine to refer column in testtab to reftab columnname
        testtab.addcols(pt.maketabdesc(
            [pt.makecoldesc(columnname, refcol_desc)]),
                        dminfo={
                            '1': {
                                'TYPE': 'ForwardColumnEngine',
                                'NAME': 'ForwardData',
                                'COLUMNS': [columnname],
                                'SPEC': {
                                    'FORWARDTABLE': reftab.name()
                                }
                            }
                        })

        testtab.flush()
Example 8
def main(options):
    ms = options.ms
    if ms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    cols = options.cols
    incol = options.incol
    
    t = pt.table(ms, readonly=False, ack=False)

    for col in cols.split(','):
        if col not in t.colnames():
            logging.info('Adding the output column '+col+' to '+ms+'.')
            if incol == '':
                incol = 'DATA'
                update = False
            else:
                update = True

            coldmi = t.getdminfo(incol)
            coldmi['NAME'] = col
            t.addcols(pt.maketabdesc(pt.makearrcoldesc(col, 0., valuetype=t.col(incol).datatype(), shape=numpy.array(t.getcell(incol,0)).shape)), coldmi)  
            if update:
                logging.warning('Setting '+col+' = '+incol+'.')
                t.putcol(col, t.getcol(incol))
        else:
            logging.warning('Column '+col+' already exists.')

    t.close()
Example 9
def test_variable_column_dimensions(tmp_path, column, dtype):
    """ ndim set to -1, we can put anything in the column """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {'desc': {'_c_order': True,
                     'comment': '%s column' % column,
                     'dataManagerGroup': '',
                     'dataManagerType': '',
                     'keywords': {},
                     # This allows any shape to go in
                     'ndim': -1,
                     'maxlen': 0,
                     'option': 0,
                     'valueType': casa_type},
            'name': column}

    table_desc = pt.maketabdesc([desc])
    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, nrow=10, ack=False) as T:
        # Put 10 rows into the table
        T.putcol(column, np.zeros((10, 20, 30), dtype=dtype))
        assert T.getcol(column).shape == (10, 20, 30)

        # Put something differently shaped in the first 5 rows
        T.putcol(column, np.zeros((5, 40), dtype=dtype), startrow=0, nrow=5)
        assert T.getcol(column, startrow=0, nrow=5).shape == (5, 40)

        # The last 5 rows have the original shape
        assert T.getcol(column, startrow=5, nrow=5).shape == (5, 20, 30)

        # We can even put a scalar in
        T.putcell(column, 8, 3)
        assert T.getcell(column, 8) == 3
Example 10
def updateObsTable(image, msName, minbl, maxbl, aswvl, usedCounts, visCounts,
                   minTime, maxTime, totTime):
    obstab = pt.table(image.name() + "/LOFAR_OBSERVATION",
                      readonly=False,
                      ack=False)
    oritab = pt.table(image.name() + "/LOFAR_ORIGIN", ack=False)
    minfreq = pt.taql("calc min([select FREQUENCY_MIN from '" + oritab.name() +
                      "'])")
    maxfreq = pt.taql("calc max([select FREQUENCY_MAX from '" + oritab.name() +
                      "'])")
    obstab.putcell("OBSERVATION_FREQUENCY_MIN", 0, minfreq[0])
    obstab.putcell("OBSERVATION_FREQUENCY_MAX", 0, maxfreq[0])
    obstab.putcell("OBSERVATION_FREQUENCY_CENTER", 0,
                   (minfreq[0] + maxfreq[0]) / 2)
    obstab.putcell("OBSERVATION_INTEGRATION_TIME", 0, totTime)
    obstab.putcell("OBSERVATION_START", 0, minTime)
    obstab.putcell("OBSERVATION_END", 0, maxTime)
    obstab.putcell("TIME_RANGE", 0, (minTime, maxTime))
    obstab.putcell("FILENAME", 0, os.path.basename(image.name()))
    obstab.putcell("FILETYPE", 0, "sky")
    pt.taql("update '" + obstab.name() + "' set FILEDATE = mjd(date()), " +
            "RELEASE_DATE = mjd(date()+365)")
    # Determine minimum and maximum baseline length
    # If needed, convert from wavelengths to meters.
    mstab = pt.table(msName, ack=False)
    if aswvl:
        minbl *= 2.99792e8 / maxfreq[0]
        maxbl *= 2.99792e8 / minfreq[0]
    if minbl <= 0:
        mbl = pt.taql("calc sqrt(min([select sumsqr(UVW[:2]) from " + msName +
                      "]))")
        minbl = max(mbl[0], abs(minbl))
    if maxbl <= 0:
        mbl = pt.taql("calc sqrt(max([select sumsqr(UVW[:2]) from " + msName +
                      "]))")
        if maxbl == 0:
            maxbl = mbl[0]
        else:
            maxbl = min(mbl[0], abs(maxbl))
    mstab.close()
    # Add and fill a few extra columns.
    col1 = pt.makescacoldesc("MIN_BASELINE_LENGTH", 0, valuetype='double')
    col2 = pt.makescacoldesc("MAX_BASELINE_LENGTH", 0, valuetype='double')
    col3 = pt.makearrcoldesc("NVIS_USED", 0, valuetype='int')
    col4 = pt.makearrcoldesc("NVIS_TOTAL", 0, valuetype='int')
    obstab.addcols(pt.maketabdesc([col1, col2, col3, col4]))
    obstab.putcolkeyword("MIN_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcolkeyword("MAX_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcell("MIN_BASELINE_LENGTH", 0, minbl)
    obstab.putcell("MAX_BASELINE_LENGTH", 0, maxbl)
    # Get sum for all MSs.
    tusedCounts = usedCounts.sum(axis=0)
    tvisCounts = visCounts.sum(axis=0)
    obstab.putcell("NVIS_USED", 0, tusedCounts)
    obstab.putcell("NVIS_TOTAL", 0, tvisCounts)
    obstab.close()
    oritab.close()
    print "Updated subtable LOFAR_OBSERVATION"
Example 11
def addcol(ms, incol, outcol):
    if outcol not in ms.colnames():
        logging.info('Adding column: '+outcol)
        coldmi = ms.getdminfo(incol)
        coldmi['NAME'] = outcol
        datatype = ms.col(incol).datatype()
        ms.addcols(pt.maketabdesc(pt.makearrcoldesc(outcol, 0., valuetype=datatype, shape=np.array(ms.getcell(incol,0)).shape)), coldmi)
    if outcol != incol:
        # copy columns val
        logging.info('Set '+outcol+'='+incol)
        ms.putcol(outcol, ms.getcol(incol))
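A hypothetical call of this helper, assuming pt is pyrap.tables and using a made-up MS path:

# Clone DATA into a new CORRECTED_DATA column and copy its values across.
ms = pt.table('observation.ms', readonly=False)
addcol(ms, 'DATA', 'CORRECTED_DATA')
ms.close()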
Example 12
def updateObsTable (image, msName, minbl, maxbl, aswvl,
                    usedCounts, visCounts, minTime, maxTime, totTime):
    obstab = pt.table (image.name() + "/LOFAR_OBSERVATION", readonly=False,
                       ack=False)
    oritab = pt.table (image.name() + "/LOFAR_ORIGIN", ack=False)
    minfreq = pt.taql ("calc min([select FREQUENCY_MIN from '" +
                       oritab.name() + "'])")
    maxfreq = pt.taql ("calc max([select FREQUENCY_MAX from '" +
                       oritab.name() + "'])") 
    obstab.putcell ("OBSERVATION_FREQUENCY_MIN", 0, minfreq[0]);
    obstab.putcell ("OBSERVATION_FREQUENCY_MAX", 0, maxfreq[0]);
    obstab.putcell ("OBSERVATION_FREQUENCY_CENTER", 0, (minfreq[0]+maxfreq[0])/2);
    obstab.putcell ("OBSERVATION_INTEGRATION_TIME", 0, totTime);
    obstab.putcell ("OBSERVATION_START", 0, minTime);
    obstab.putcell ("OBSERVATION_END", 0, maxTime);
    obstab.putcell ("TIME_RANGE", 0, (minTime, maxTime));
    obstab.putcell ("FILENAME", 0, os.path.basename(image.name()))
    obstab.putcell ("FILETYPE", 0, "sky")
    pt.taql ("update '" + obstab.name() + "' set FILEDATE = mjd(date()), " +
             "RELEASE_DATE = mjd(date()+365)")
    # Determine minimum and maximum baseline length
    # If needed, convert from wavelengths to meters.
    mstab = pt.table(msName, ack=False)
    if aswvl:
        minbl *= 2.99792e8 / maxfreq[0]
        maxbl *= 2.99792e8 / minfreq[0]
    if minbl <= 0:
        mbl = pt.taql ("calc sqrt(min([select sumsqr(UVW[:2]) from " + msName + "]))")
        minbl = max(mbl[0], abs(minbl))
    if maxbl <= 0:
        mbl = pt.taql ("calc sqrt(max([select sumsqr(UVW[:2]) from " + msName + "]))")
        if maxbl == 0:
            maxbl = mbl[0]
        else:
            maxbl = min(mbl[0], abs(maxbl))
    mstab.close()
    # Add and fill a few extra columns.
    col1 = pt.makescacoldesc ("MIN_BASELINE_LENGTH", 0, valuetype='double')
    col2 = pt.makescacoldesc ("MAX_BASELINE_LENGTH", 0, valuetype='double')
    col3 = pt.makearrcoldesc ("NVIS_USED", 0, valuetype='int')
    col4 = pt.makearrcoldesc ("NVIS_TOTAL", 0, valuetype='int')
    obstab.addcols (pt.maketabdesc ([col1, col2, col3, col4]))
    obstab.putcolkeyword ("MIN_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcolkeyword ("MAX_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcell ("MIN_BASELINE_LENGTH", 0, minbl)
    obstab.putcell ("MAX_BASELINE_LENGTH", 0, maxbl)
    # Get sum for all MSs.
    tusedCounts = usedCounts.sum (axis=0)
    tvisCounts  =  visCounts.sum (axis=0)
    obstab.putcell ("NVIS_USED", 0, tusedCounts)
    obstab.putcell ("NVIS_TOTAL", 0, tvisCounts)
    obstab.close()
    oritab.close()
    print "Updated subtable LOFAR_OBSERVATION"
Example 13
    def addRefColumnToTesttab(self, columnname):
        if self.verbose:
            print(bcolors.OKBLUE + "Forwarding reference column " + bcolors.WARNING + columnname + bcolors.OKBLUE + " to " + self.test_MS + bcolors.ENDC)  # DEBUG
           
        testtab=pt.table(self.test_MS, readonly=False)          # Open test_MS in read/write mode
        reftab=pt.table(self.MS)                  # Open reference MS in readonly mode
  
        # Get column description from reftab
        testcolumnname = "test_" + columnname
        
        testtab.renamecol(columnname, testcolumnname)           # rename the existing column in the test table
        refcol_desc=reftab.getcoldesc(columnname)  

        # Use ForwardColumnEngine to refer column in testtab to reftab columnname
        testtab.addcols(pt.maketabdesc([pt.makecoldesc(columnname, refcol_desc)]), dminfo={'1':{'TYPE':'ForwardColumnEngine', 'NAME':'ForwardData', 'COLUMNS':[columnname], 'SPEC':{'FORWARDTABLE':reftab.name()}}})

        testtab.flush()
Example 14
def main(options):
    inms = options.inms
    if inms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    outcol = options.outcol

    t = pt.table(inms, readonly=False, ack=True)

    if outcol not in t.colnames():
        print('Adding the output column', outcol, 'to', inms)
        coldmi = t.getdminfo('WEIGHT_SPECTRUM')
        coldmi['NAME'] = outcol
        t.addcols(pt.maketabdesc(pt.makearrcoldesc(outcol, 0., valuetype='float', shape=numpy.array(t.getcell('WEIGHT_SPECTRUM',0)).shape)), coldmi)
        data = t.getcol('WEIGHT_SPECTRUM')
        t.putcol(outcol, data)
    else:
        print(outcol, 'column already exists')
Example 15
def test_tiledstman(tmp_path, column, row, shape, dtype):
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {
        'desc': {
            '_c_order': True,
            'comment': f'{column} column',
            'dataManagerGroup': 'BAZ-GROUP',
            'dataManagerType': 'TiledColumnStMan',
            'keywords': {},
            'maxlen': 0,
            'ndim': len(shape),
            'option': 0,
            'shape': shape,
            'valueType': casa_type
        },
        'name': column
    }

    table_desc = pt.maketabdesc([desc])

    tile_shape = tuple(reversed(shape)) + (row, )

    dminfo = {
        '*1': {
            'NAME': 'BAZ-GROUP',
            'TYPE': 'TiledColumnStMan',
            'SPEC': {
                'DEFAULTTILESHAPE': tile_shape
            },
            'COLUMNS': ['BAZ'],
        }
    }

    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, dminfo=dminfo, nrow=10, ack=False) as T:
        dmg = T.getdminfo()['*1']
        assert dmg['NAME'] == 'BAZ-GROUP'
        assert_array_equal(dmg['SPEC']['DEFAULTTILESHAPE'], tile_shape)
        assert dmg['TYPE'] == 'TiledColumnStMan'
        assert dmg['COLUMNS'] == ['BAZ']
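The same TiledColumnStMan binding can also be expressed through makearrcoldesc's data manager keyword arguments instead of a raw descriptor dict; a sketch with placeholder shape and value type (the dminfo carrying DEFAULTTILESHAPE is still passed at table creation):

import pyrap.tables as pt

desc = pt.makearrcoldesc("BAZ", 0.0, ndim=2, shape=[20, 30],
                         datamanagertype="TiledColumnStMan",
                         datamanagergroup="BAZ-GROUP",
                         valuetype="double")
table_desc = pt.maketabdesc([desc])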
Example 16
def test_variable_column_shapes(tmp_path, column, dtype):
    """ ndim set to 2, but shapes are variable """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {
        'desc': {
            '_c_order': True,
            'comment': f'{column} column',
            'dataManagerGroup': '',
            'dataManagerType': '',
            'keywords': {},
            'ndim': 2,
            'maxlen': 0,
            'option': 0,
            'valueType': casa_type
        },
        'name': column
    }

    table_desc = pt.maketabdesc([desc])
    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, nrow=10, ack=False) as T:
        # Put 10 rows into the table
        T.putcol(column, np.zeros((10, 20, 30), dtype=dtype))
        assert T.getcol(column).shape == (10, 20, 30)

        # Must be ndim == 2
        err_str = "Table array conformance error"
        with pytest.raises(RuntimeError, match=err_str):
            T.putcol(column, np.zeros((5, 40), dtype=dtype))

        # Put something differently shaped in the first 5 rows
        T.putcol(column,
                 np.zeros((5, 40, 30), dtype=dtype),
                 startrow=0,
                 nrow=5)
        assert T.getcol(column, startrow=0, nrow=5).shape == (5, 40, 30)

        # The last 5 rows have the original shape
        assert T.getcol(column, startrow=5, nrow=5).shape == (5, 20, 30)
Example 17
def test_variable_column_descriptor(chunks, dtype, tmp_path):
    column_meta = []
    shapes = {k: sum(c) for k, c in chunks.items()}

    # Make some visibilities
    dims = ("row", "chan", "corr")
    shape = tuple(shapes[d] for d in dims)
    data_chunks = tuple(chunks[d] for d in dims)
    data = da.random.random(shape, chunks=data_chunks).astype(dtype)
    data_var = Variable(dims, data, {})
    meta = variable_column_descriptor("DATA", data_var)
    column_meta.append({"name": "DATA", "desc": meta})

    # Make some string names
    dims = ("row", )
    shape = tuple(shapes[d] for d in dims)
    str_chunks = tuple(chunks[d] for d in dims)
    np_str_array = np.asarray(["BOB"] * shape[0], dtype=object)
    da_str_array = da.from_array(np_str_array, chunks=str_chunks)
    str_array_var = Variable(dims, da_str_array, {})
    meta = variable_column_descriptor("NAMES", str_array_var)
    column_meta.append({"name": "NAMES", "desc": meta})

    # Create a new table with the column metadata
    fn = os.path.join(str(tmp_path), "test.ms")
    tabdesc = pt.maketabdesc(column_meta)

    with pt.table(fn, tabdesc, readonly=False, ack=False) as T:
        # Add rows
        T.addrows(shapes['row'])

        str_list = np_str_array.tolist()

        # Put data
        T.putcol("DATA", data.compute())
        T.putcol("NAMES", str_list)

        # We get out what we put in
        assert_array_equal(T.getcol("NAMES"), str_list)
        assert_array_equal(T.getcol("DATA"), data)
Example 18
def export_ms(hdf_file, ms_file, verbosity=1):
    """ Convert an HDF file to MS
    :param hdf_file: Input HDF-MS filename
    :param ms_file: Output MS filename

    TODO: Get this working properly.
    """
    pp = PrintLog(verbosity=verbosity)
    hdul = IdiHdulist(verbosity=1)
    hdul.read_hdf("testms.h5")

    main_hdu = hdul["MAIN"]

    vdict = {
        'float32': 'float',
        'float64': 'double',
        'complex64': 'complex',
        'complex128': 'dcomplex',
        'int32': 'int',
        'uint32': 'uint',
        'str': 'string',
        'bool': 'bool'
    }

    col_descs = []
    for col, cdata in main_hdu.data.items():
        col = str(col)
        pp.pp("%16s %s %s" % (col, cdata.shape, cdata.dtype))

        vt = vdict[str(cdata.dtype)]
        if cdata.ndim == 1:
            cdesc = pt.makescacoldesc(col, cdata[0], valuetype=vt)
        else:
            cdesc = pt.makearrcoldesc(col, cdata[0], valuetype=vt)
        col_descs.append(cdesc)

    tdesc = pt.maketabdesc(col_descs)

    t = pt.table("table.ms", tdesc, nrow=main_hdu.n_rows)
Example 19
def main(options):

    cI = complex(0., 1.)

    inms = options.inms
    if inms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    column = options.column
    outcol = options.outcol

    t = pt.table(inms, readonly=False, ack=True)
    if outcol not in t.colnames():
        print('Adding output column', outcol, 'to', inms)
        coldmi = t.getdminfo(column)
        coldmi['NAME'] = outcol
        t.addcols(
            pt.maketabdesc(
                pt.makearrcoldesc(outcol,
                                  0.,
                                  valuetype='complex',
                                  shape=numpy.array(t.getcell(column,
                                                              0)).shape)),
            coldmi)
    print('Reading input column')
    data = t.getcol(column)
    print('Computing output column')
    outdata = numpy.transpose(
        numpy.array([
            data[:, :, 0] - cI * data[:, :, 1] + cI * data[:, :, 2] +
            data[:, :, 3], data[:, :, 0] + cI * data[:, :, 1] +
            cI * data[:, :, 2] - data[:, :, 3], data[:, :, 0] -
            cI * data[:, :, 1] - cI * data[:, :, 2] - data[:, :, 3],
            data[:, :, 0] + cI * data[:, :, 1] - cI * data[:, :, 2] +
            data[:, :, 3]
        ]), (1, 2, 0))
    print('Finishing up')
    t.putcol(outcol, outdata)
Example 20
def export_ms(hdf_file, ms_file, verbosity=1):
    """ Convert an HDF file to MS
    :param hdf_file: Input HDF-MS filename
    :param ms_file: Output MS filename

    TODO: Get this working properly.
    """
    pp = PrintLog(verbosity=verbosity)
    hdul = IdiHdulist(verbosity=1)
    hdul.read_hdf("testms.h5")

    main_hdu = hdul["MAIN"]

    vdict = {'float32': 'float',
             'float64': 'double',
             'complex64': 'complex',
             'complex128': 'dcomplex',
             'int32': 'int',
             'uint32': 'uint',
             'str': 'string',
             'bool': 'bool'}

    col_descs = []
    for col, cdata in main_hdu.data.items():
        col = str(col)
        pp.pp("%16s %s %s" % (col, cdata.shape, cdata.dtype))

        vt = vdict[str(cdata.dtype)]
        if cdata.ndim == 1:
            cdesc = pt.makescacoldesc(col, cdata[0], valuetype=vt)
        else:
            cdesc = pt.makearrcoldesc(col, cdata[0], valuetype=vt)
        col_descs.append(cdesc)

    tdesc = pt.maketabdesc(col_descs)

    t = pt.table("table.ms", tdesc, nrow=main_hdu.n_rows)
Example 21
def addcol(msname,
           colname=None,
           shape=None,
           data_desc_type='array',
           valuetype=None,
           init_with=None,
           coldesc=None,
           coldmi=None,
           clone='DATA',
           rowchunk=None,
           **kw):
    """ Add column to MS 
        msanme : MS to add colmn to
        colname : column name
        shape : shape
        valuetype : data type 
        data_desc_type : 'scalar' for scalar elements and array for 'array' elements
        init_with : value to initialise the column with 
    """
    tab = table(msname, readonly=False)

    if colname in tab.colnames():
        print('Column already exists')
        return 'exists'

    print('Attempting to add %s column to %s' % (colname, msname))

    valuetype = valuetype or 'complex'

    if coldesc:
        data_desc = coldesc
        shape = coldesc['shape']
    elif shape:
        data_desc = maketabdesc(
            makearrcoldesc(colname,
                           init_with,
                           shape=shape,
                           valuetype=valuetype))
    elif data_desc_type == 'scalar':
        data_desc = maketabdesc(
            makescacoldesc(colname, init_with, valuetype=valuetype))
    elif clone:
        element = tab.getcell(clone, 0)
        try:
            shape = element.shape
            data_desc = maketabdesc(
                makearrcoldesc(colname,
                               element.flatten()[0],
                               shape=shape,
                               valuetype=valuetype))
        except AttributeError:
            shape = []
            data_desc = maketabdesc(
                makearrcoldesc(colname, element, valuetype=valuetype))

    colinfo = [data_desc, coldmi] if coldmi else [data_desc]
    tab.addcols(*colinfo)

    print('Column added successfully.')

    if init_with is None:
        tab.close()
        return 'added'
    else:
        spwids = set(tab.getcol('DATA_DESC_ID'))
        for spw in spwids:
            print('Initialising {0:s} column with {1}. DDID is {2:d}'.format(
                colname, init_with, spw))
            tab_spw = tab.query('DATA_DESC_ID=={0:d}'.format(spw))
            nrows = tab_spw.nrows()

            rowchunk = rowchunk or max(nrows // 10, 1)
            dshape = [0] + [a for a in shape]
            for row0 in range(0, nrows, rowchunk):
                nr = min(rowchunk, nrows - row0)
                dshape[0] = nr
                print("Wrtiting to column  %s (rows %d to %d)" %
                      (colname, row0, row0 + nr - 1))
                tab_spw.putcol(
                    colname,
                    numpy.ones(dshape, dtype=type(init_with)) * init_with,
                    row0, nr)
            tab_spw.close()
    tab.close()

    return 'added'
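For illustration, a hypothetical call of this variant (MS path and column name made up); cloning the shape of the DATA column is the default behaviour:

status = addcol('observation.ms', colname='MODEL_DATA',
                valuetype='complex', init_with=1.0)
print(status)   # 'added', or 'exists' if the column was already there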
Example 22
def main(options):

    cI = complex(0., 1.)

    inms = options.inms
    if inms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    column = options.column
    outcol = options.outcol

    t = pt.table(inms, readonly=False, ack=True)
    if options.back:
        lincol = options.lincol
        if lincol not in t.colnames():
            print('Adding the output linear polarization column', lincol, 'to', inms)
            coldmi = t.getdminfo(column)
            coldmi['NAME'] = lincol
            t.addcols(
                pt.maketabdesc(
                    pt.makearrcoldesc(lincol,
                                      0.,
                                      valuetype='complex',
                                      shape=numpy.array(t.getcell(column,
                                                                  0)).shape)),
                coldmi)

        ### RVW EDIT 2012
        print('Reading the input column (circular)', column)
        if column not in t.colnames():
            print('Error: Input column does not exist')
            return

        ### RVW EDIT 2012 Input column with the -c switch
        cirdata = t.getcol(column)
        #cirdata = t.getcol(outcol)
        #print 'SHAPE ARRAY', numpy.shape(cirdata)

        print('Computing the linear polarization terms...')
        lindata = numpy.transpose(
            numpy.array([
                0.5 * (cirdata[:, :, 0] + cirdata[:, :, 1] + cirdata[:, :, 2] +
                       cirdata[:, :, 3]),
                0.5 * (cI * cirdata[:, :, 0] - cI * cirdata[:, :, 1] +
                       cI * cirdata[:, :, 2] - cI * cirdata[:, :, 3]),
                0.5 * (-cI * cirdata[:, :, 0] - cI * cirdata[:, :, 1] +
                       cI * cirdata[:, :, 2] + cI * cirdata[:, :, 3]),
                0.5 * (cirdata[:, :, 0] - cirdata[:, :, 1] - cirdata[:, :, 2] +
                       cirdata[:, :, 3])
            ]), (1, 2, 0))
        print('Finishing up...')
        t.putcol(lincol, lindata)
    else:
        if outcol not in t.colnames():
            print('Adding the output column', outcol, 'to', inms)
            coldmi = t.getdminfo(column)
            coldmi['NAME'] = outcol
            t.addcols(
                pt.maketabdesc(
                    pt.makearrcoldesc(outcol,
                                      0.,
                                      valuetype='complex',
                                      shape=numpy.array(t.getcell(column,
                                                                  0)).shape)),
                coldmi)
        print('Reading the input column (linear)', column)
        data = t.getcol(column)
        print('Computing the output column')
        outdata = numpy.transpose(
            numpy.array([
                0.5 * (data[:, :, 0] - cI * data[:, :, 1] +
                       cI * data[:, :, 2] + data[:, :, 3]),
                0.5 * (data[:, :, 0] + cI * data[:, :, 1] +
                       cI * data[:, :, 2] - data[:, :, 3]),
                0.5 * (data[:, :, 0] - cI * data[:, :, 1] -
                       cI * data[:, :, 2] - data[:, :, 3]),
                0.5 * (data[:, :, 0] + cI * data[:, :, 1] -
                       cI * data[:, :, 2] + data[:, :, 3])
            ]), (1, 2, 0))
        print('Finishing up...')
        t.putcol(outcol, outdata)
    if options.poltable:
        print('Updating the POLARIZATION table...')
        tp = pt.table(inms + '/POLARIZATION', readonly=False, ack=True)

        ### RVW EDIT 2012
        if options.back:
            tp.putcol('CORR_TYPE',
                      numpy.array([[9, 10, 11, 12]],
                                  dtype=numpy.int32))  # FROM CIRC-->LIN
        else:
            tp.putcol('CORR_TYPE',
                      numpy.array([[5, 6, 7, 8]],
                                  dtype=numpy.int32))  # FROM LIN-->CIRC
Example 23
def test_tiledstman_addcols(tmp_path, column, row, shape, dtype):
    """ ndim set to zero implies scalars """
    casa_type = infer_casa_type(dtype)

    # Column descriptor
    desc = {
        'desc': {
            '_c_order': True,
            'comment': f'{column} column',
            'dataManagerGroup': 'BAZ_GROUP',
            'dataManagerType': 'TiledColumnStMan',
            'keywords': {},
            'maxlen': 0,
            'ndim': len(shape),
            'option': 0,
            'shape': shape,
            'valueType': casa_type
        },
        'name': column
    }

    table_desc = pt.maketabdesc([desc])

    tile_shape = tuple(reversed(shape)) + (row, )

    dminfo = {
        '*1': {
            'NAME': 'BAZ_GROUP',
            'TYPE': 'TiledColumnStMan',
            'SPEC': {
                'DEFAULTTILESHAPE': tile_shape
            },
            'COLUMNS': ['BAZ'],
        }
    }

    fn = os.path.join(str(tmp_path), "test.table")

    with pt.table(fn, table_desc, dminfo=dminfo, nrow=10, ack=False) as T:
        # Add a new FRED column
        desc = {
            'FRED': {
                'dataManagerGroup': 'FRED_GROUP',
                'dataManagerType': 'TiledColumnStMan',
                'ndim': len(shape),
                'shape': shape,
                'valueType': casa_type
            }
        }

        dminfo = {
            '*1': {
                'NAME': 'FRED_GROUP',
                'TYPE': 'TiledColumnStMan',
                'SPEC': {
                    'DEFAULTTILESHAPE': tile_shape
                },
                'COLUMNS': ['FRED'],
            }
        }

        T.addcols(desc, dminfo=dminfo)

        # Trying to add a new QUX column by redefining FRED_GROUP fails
        desc = {
            'QUX': {
                'dataManagerGroup': 'FRED_GROUP',
                'dataManagerType': 'TiledColumnStMan',
                'ndim': len(shape),
                'shape': shape,
                'valueType': casa_type
            }
        }

        dminfo = {
            '*1': {
                'NAME': 'FRED_GROUP',
                'TYPE': 'TiledColumnStMan',
                'SPEC': {
                    'DEFAULTTILESHAPE': tile_shape
                },
                'COLUMNS': ['FRED', 'QUX'],
            }
        }

        with pytest.raises(RuntimeError, match="Data manager name FRED_GROUP"):
            T.addcols(desc, dminfo=dminfo)

        groups = {g['NAME']: g for g in T.getdminfo().values()}
        assert set(["BAZ_GROUP", "FRED_GROUP"]) == set(groups.keys())

        # Adding new QUX column succeeds, but can't
        # add columns to an existing TiledColumnStMan?
        # casacore creates a new group, FRED_GROUP_1
        T.addcols(desc)

        groups = {g['NAME']: g for g in T.getdminfo().values()}
        assert set(["BAZ_GROUP", "FRED_GROUP",
                    "FRED_GROUP_1"]) == set(groups.keys())

        # Adding ACK and BAR to the ACKBAR_GROUP at the same time succeeds
        desc = {
            "ACK": {
                'dataManagerGroup': 'ACKBAR_GROUP',
                'dataManagerType': 'TiledColumnStMan',
                'ndim': len(shape),
                'shape': shape,
                'valueType': casa_type
            },
            "BAR": {
                'dataManagerGroup': 'ACKBAR_GROUP',
                'dataManagerType': 'TiledColumnStMan',
                'ndim': len(shape),
                'shape': shape,
                'valueType': casa_type
            },
        }

        dminfo = {
            '*1': {
                'NAME': 'ACKBAR_GROUP',
                'TYPE': 'TiledColumnStMan',
                'SPEC': {
                    'DEFAULTTILESHAPE': tile_shape
                },
                'COLUMNS': ['ACK', 'BAR'],
            }
        }

        T.addcols(desc, dminfo=dminfo)

        groups = {g['NAME']: g for g in T.getdminfo().values()}
        assert set(["BAZ_GROUP", "FRED_GROUP", "FRED_GROUP_1",
                    "ACKBAR_GROUP"]) == set(groups.keys())

        assert set(groups["ACKBAR_GROUP"]['COLUMNS']) == set(["ACK", "BAR"])
Example 24
def main(options):

    cI = complex(0., 1.)

    inms = options.inms
    if inms == '':
        print('Error: you have to specify an input MS, use -h for help')
        return
    column = options.column
    outcol = options.outcol

    t = pt.table(inms, readonly=False, ack=True)
    if options.back:
        lincol = options.lincol
        if lincol not in t.colnames():
            print('Adding the output linear polarization column', lincol, 'to', inms)
            coldmi = t.getdminfo(column)
            coldmi['NAME'] = lincol
            t.addcols(pt.maketabdesc(pt.makearrcoldesc(lincol, 0., valuetype='complex', shape=numpy.array(t.getcell(column,0)).shape)), coldmi)

        ### RVW EDIT 2012
        print('Reading the input column (circular)', column)
        if column not in t.colnames():
            print('Error: Input column does not exist')
            return

        ### RVW EDIT 2012 Input column with the -c switch
        cirdata = t.getcol(column)
        #cirdata = t.getcol(outcol)
        #print('SHAPE ARRAY', numpy.shape(cirdata))

        print('Computing the linear polarization terms...')
        lindata = numpy.transpose(numpy.array([
                0.5*(cirdata[:,:,0]+cirdata[:,:,1]+cirdata[:,:,2]+cirdata[:,:,3]),
                0.5*(cI*cirdata[:,:,0]-cI*cirdata[:,:,1]+cI*cirdata[:,:,2]-cI*cirdata[:,:,3]),
                0.5*(-cI*cirdata[:,:,0]-cI*cirdata[:,:,1]+cI*cirdata[:,:,2]+cI*cirdata[:,:,3]),
                0.5*(cirdata[:,:,0]-cirdata[:,:,1]-cirdata[:,:,2]+cirdata[:,:,3])]),
                (1,2,0))
        print('Finishing up...')
        t.putcol(lincol, lindata)
    else:
        if outcol not in t.colnames():
            print('Adding the output column', outcol, 'to', inms)
            coldmi = t.getdminfo(column)
            coldmi['NAME'] = outcol
            t.addcols(pt.maketabdesc(pt.makearrcoldesc(outcol, 0., valuetype='complex', shape=numpy.array(t.getcell(column,0)).shape)), coldmi)
        print('Reading the input column (linear)', column)
        data = t.getcol(column)
        print('Computing the output column')
        outdata = numpy.transpose(numpy.array([
                0.5*(data[:,:,0]-cI*data[:,:,1]+cI*data[:,:,2]+data[:,:,3]),
                0.5*(data[:,:,0]+cI*data[:,:,1]+cI*data[:,:,2]-data[:,:,3]),
                0.5*(data[:,:,0]-cI*data[:,:,1]-cI*data[:,:,2]-data[:,:,3]),
                0.5*(data[:,:,0]+cI*data[:,:,1]-cI*data[:,:,2]+data[:,:,3])]),
                (1,2,0))
        print('Finishing up...')
        t.putcol(outcol, outdata)
    if options.poltable:
        print('Updating the POLARIZATION table...')
        tp = pt.table(inms+'/POLARIZATION', readonly=False, ack=True)

        ### RVW EDIT 2012
        if options.back:
            tp.putcol('CORR_TYPE', numpy.array([[9,10,11,12]], dtype=numpy.int32))  # FROM CIRC-->LIN
        else:
            tp.putcol('CORR_TYPE', numpy.array([[5,6,7,8]], dtype=numpy.int32))  # FROM LIN-->CIRC
Example 25
import pyrap.tables as pt
# Get nr of channels
t = pt.table(msname + '/SPECTRAL_WINDOW')
nf = t.getcell('NUM_CHAN', 0)
t.close()
# Get nr of receptors and value (for the datatype)
t = pt.table(msname + '/SYSCAL', readonly=False)
val = t.getcell('TCAL', 0)
shp = [nf, len(val)]
shpstr = str(shp)
# Add a freq-dep column for each freq-indep column
t.addcols(
    pt.maketabdesc([pt.makearrcoldesc('TCAL_SPECTRUM', val[0], ndim=2, shape=shp),
                    pt.makearrcoldesc('TSYS_SPECTRUM', val[0], ndim=2,
                                      shape=shp)]))  # etc. for other columns
t.close()
# Copy the freq-indep values to the freq-dep (TaQL's array function extends the values)
pt.taql(
    'update %s/SYSCAL set TCAL_SPECTRUM=array(TCAL,%s), TSYS_SPECTRUM=array(TSYS,%s)'
    % (msname, shpstr, shpstr))
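A quick follow-up check (sketch only, reusing msname from above) that the new frequency-dependent cells have the expected [nf, nreceptors] shape:

t = pt.table(msname + '/SYSCAL')
print(t.getcell('TCAL_SPECTRUM', 0).shape)
t.close()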
Example 26
def kat_ms_desc_and_dminfo(nbl, nchan, ncorr, model_data=False):
    """
    Creates Table Description and Data Manager Information objects that
    describe a MeasurementSet suitable for holding MeerKAT data.

    Creates additional DATA, IMAGING_WEIGHT and possibly
    MODEL_DATA and CORRECTED_DATA columns.

    Columns are given fixed shapes defined by the arguments to this function.

    :param nbl: Number of baselines.
    :param nchan: Number of channels.
    :param ncorr: Number of correlations.
    :param model_data: Boolean indicating whether MODEL_DATA and CORRECTED_DATA
                        should be added to the Measurement Set.
    :return: Returns a tuple containing a table description describing
            the extra columns and hypercolumns, as well as a Data Manager
            description.
    """

    if not casacore_binding == 'pyrap':
        raise ValueError("kat_ms_desc_and_dminfo requires the "
                         "casacore binding to operate")

    # Columns that will be modified.
    # We want to keep things like their
    # keywords, dims and shapes
    modify_columns = {
        "WEIGHT", "SIGMA", "FLAG", "FLAG_CATEGORY", "UVW", "ANTENNA1",
        "ANTENNA2"
    }

    # Get the required table descriptor for an MS
    table_desc = tables.required_ms_desc("MAIN")

    # Take columns we wish to modify
    extra_table_desc = {
        c: d
        for c, d in table_desc.items() if c in modify_columns
    }

    # Used to set the SPEC for each Data Manager Group
    dmgroup_spec = {}

    def dmspec(coldesc, tile_mem_limit=None):
        """
        Create data manager spec for a given column description,
        mostly by adding a DEFAULTTILESHAPE that fits into the
        supplied memory limit.
        """

        # Choose 4MB if none given
        if tile_mem_limit is None:
            tile_mem_limit = 4 * 1024 * 1024

        # Get the reversed column shape. DEFAULTTILESHAPE is deep in
        # casacore and it's necessary to specify the ordering here;
        # ntilerows is the dim that will change least quickly
        rev_shape = list(reversed(coldesc["shape"]))

        ntilerows = 1
        np_dtype = MS_TO_NP_TYPE_MAP[coldesc["valueType"].upper()]
        nbytes = np.dtype(np_dtype).itemsize

        # Try to bump up the number of rows in our tiles while they're
        # below the memory limit for the tile
        while np.prod(rev_shape +
                      [2 * ntilerows]) * nbytes < tile_mem_limit:
            ntilerows *= 2

        return {"DEFAULTTILESHAPE": np.int32(rev_shape + [ntilerows])}

    # Update existing columns with shape and data manager information
    dm_group = 'UVW'
    shape = [3]
    extra_table_desc["UVW"].update(options=0,
                                   shape=shape,
                                   ndim=len(shape),
                                   dataManagerGroup=dm_group,
                                   dataManagerType='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(extra_table_desc["UVW"])

    dm_group = 'Weight'
    shape = [ncorr]
    extra_table_desc["WEIGHT"].update(options=4,
                                      shape=shape,
                                      ndim=len(shape),
                                      dataManagerGroup=dm_group,
                                      dataManagerType='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(extra_table_desc["WEIGHT"])

    dm_group = 'Sigma'
    shape = [ncorr]
    extra_table_desc["SIGMA"].update(options=4,
                                     shape=shape,
                                     ndim=len(shape),
                                     dataManagerGroup=dm_group,
                                     dataManagerType='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(extra_table_desc["SIGMA"])

    dm_group = 'Flag'
    shape = [nchan, ncorr]
    extra_table_desc["FLAG"].update(options=4,
                                    shape=shape,
                                    ndim=len(shape),
                                    dataManagerGroup=dm_group,
                                    dataManagerType='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(extra_table_desc["FLAG"])

    dm_group = 'FlagCategory'
    shape = [1, nchan, ncorr]
    extra_table_desc["FLAG_CATEGORY"].update(
        options=4,
        keywords={},
        shape=shape,
        ndim=len(shape),
        dataManagerGroup=dm_group,
        dataManagerType='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(extra_table_desc["FLAG_CATEGORY"])

    # Create new columns for integration into the MS
    additional_columns = []

    dm_group = 'Data'
    shape = [nchan, ncorr]
    desc = tables.tablecreatearraycoldesc("DATA",
                                          0 + 0j,
                                          comment="The Visibility DATA Column",
                                          options=4,
                                          valuetype='complex',
                                          keywords={"UNIT": "Jy"},
                                          shape=shape,
                                          ndim=len(shape),
                                          datamanagergroup=dm_group,
                                          datamanagertype='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(desc["desc"])
    additional_columns.append(desc)

    dm_group = 'ImagingWeight'
    shape = [nchan]
    desc = tables.tablecreatearraycoldesc(
        "IMAGING_WEIGHT",
        0,
        comment="Weight set by imaging task (e.g. uniform weighting)",
        options=4,
        valuetype='float',
        shape=shape,
        ndim=len(shape),
        datamanagergroup=dm_group,
        datamanagertype='TiledColumnStMan')
    dmgroup_spec[dm_group] = dmspec(desc["desc"])
    additional_columns.append(desc)

    # Add MODEL_DATA and CORRECTED_DATA if requested
    if model_data:
        dm_group = 'ModelData'
        shape = [nchan, ncorr]
        desc = tables.tablecreatearraycoldesc(
            "MODEL_DATA",
            0 + 0j,
            comment="The Visibility MODEL_DATA Column",
            options=4,
            valuetype='complex',
            keywords={"UNIT": "Jy"},
            shape=shape,
            ndim=len(shape),
            datamanagergroup=dm_group,
            datamanagertype='TiledColumnStMan')
        dmgroup_spec[dm_group] = dmspec(desc["desc"])
        additional_columns.append(desc)

        dm_group = 'CorrectedData'
        shape = [nchan, ncorr]
        desc = tables.tablecreatearraycoldesc(
            "CORRECTED_DATA",
            0 + 0j,
            comment="The Visibility CORRECTED_DATA Column",
            options=4,
            valuetype='complex',
            keywords={"UNIT": "Jy"},
            shape=shape,
            ndim=len(shape),
            datamanagergroup=dm_group,
            datamanagertype='TiledColumnStMan')
        dmgroup_spec[dm_group] = dmspec(desc["desc"])
        additional_columns.append(desc)

    # Update extra table description with additional columns
    extra_table_desc.update(tables.maketabdesc(additional_columns))

    # Update the original table descriptor with modifications/additions
    # Need this to construct a complete Data Manager specification
    # that includes the original columns
    table_desc.update(extra_table_desc)

    # Construct DataManager Specification
    dminfo = tables.makedminfo(table_desc, dmgroup_spec)

    return extra_table_desc, dminfo
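For context, a hypothetical way to consume the returned pair; the MS name and dimensions are made up, and it assumes the casacore binding exposes tables.default_ms(name, tabdesc, dminfo) returning the new main table:

# Hedged sketch: create an empty MeasurementSet using the extra column
# descriptions and data manager info produced above.
table_desc, dminfo = kat_ms_desc_and_dminfo(nbl=28, nchan=4096, ncorr=4,
                                            model_data=True)
ms = tables.default_ms("meerkat_example.ms", table_desc, dminfo)
ms.close()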
Example 27
import pyrap.tables as pt
# Get nr of channels
t = pt.table (msname + '/SPECTRAL_WINDOW')
nf = t.getcell('NUM_CHAN', 0)
t.close()
# Get nr of receptors and value (for the datatype)
t = pt.table (msname + '/SYSCAL', readonly=False)
val = t.getcell('TCAL', 0)
shp = [nf, len(val)]
shpstr = str(shp)
# Add a freq-dep column for each freq-indep column
t.addcols (pt.maketabdesc([pt.makearrcoldesc('TCAL_SPECTRUM', val[0], ndim=2, shape=shp),
                           pt.makearrcoldesc('TSYS_SPECTRUM', val[0], ndim=2, shape=shp)]))   # etc. for other columns
t.close()
# Copy the freq-indep values to the freq-dep (TaQL's array function extends the values)
pt.taql ('update %s/SYSCAL set TCAL_SPECTRUM=array(TCAL,%s), TSYS_SPECTRUM=array(TSYS,%s)' % (msname,shpstr,shpstr))