Code example #1
def support_tables(args, tables):
    """
    Open the requested Measurement Set support tables and load them
    eagerly into memory.

    Parameters
    ----------
    args : object
        Script argument objects
    tables : list of str
        List of support tables to open

    Returns
    -------
    table_map : dict of :class:`xarray.Dataset`
        {name: dataset}
    """
    table_map = {}

    for name in tables:
        # Subtables are addressed as "<ms path>::<subtable name>"
        subtable = "::".join((args.ms, name))
        row_datasets = xds_from_table(subtable, group_cols="__row__")
        # Evaluate each per-row dataset up front
        table_map[name] = [ds.compute() for ds in row_datasets]

    return table_map
Code example #2

args = create_parser().parse_args()

# Configure dask's worker pool. An explicit --ncpu request installs a
# ThreadPool of that size; otherwise we only report the machine's core
# count and leave dask's default scheduler pool in place.
if args.ncpu:
    ncpu = args.ncpu
    from multiprocessing.pool import ThreadPool
    dask.config.set(pool=ThreadPool(ncpu))
else:
    import multiprocessing
    ncpu = multiprocessing.cpu_count()

print("Using %i threads" % ncpu)

# Get MS frequencies from the (single) SPECTRAL_WINDOW row
spw_ds = list(xds_from_table("::".join((args.ms, "SPECTRAL_WINDOW")),
                             group_cols="__row__"))[0]

# Get frequencies in the measurement set
# If these do not match those in the fits
# file we need to interpolate
ms_freqs = spw_ds.CHAN_FREQ.data.compute()
nchan = ms_freqs.size

# Load the model image and its header from the FITS file
model = fits.getdata(args.fitsmodel)
# get header
hdr = fits.getheader(args.fitsmodel)

# TODO - check that PHASE_DIR in MS matches that in fits
# Get image coordinates; axis units must be degrees (case-insensitive).
# NOTE(review): the non-degree branch is truncated in this excerpt —
# presumably it converts units or aborts; confirm against the full source.
if hdr['CUNIT1'] != "DEG" and hdr['CUNIT1'] != "deg":
Code example #3
File: example.py — Project: Mulan-94/xarray-ms
    from dask.diagnostics import Profiler, ProgressBar

    def create_parser():
        """Build the command-line parser for this example script."""
        p = argparse.ArgumentParser()
        p.add_argument("ms")
        p.add_argument("-c", "--chunks", type=int, default=10000)
        p.add_argument("-s", "--scheduler", default="threaded")
        return p

    args = create_parser().parse_args()

    with scheduler_context(args):
        # Create a dataset representing the entire antenna table
        ant_table = '::'.join((args.ms, 'ANTENNA'))

        for ant_ds in xds_from_table(ant_table):
            print(ant_ds)
            # Evaluate the three antenna columns in one dask.compute call
            print(
                dask.compute(ant_ds.NAME.data, ant_ds.POSITION.data,
                             ant_ds.DISH_DIAMETER.data))

        # Create datasets representing each row of the spw table
        spw_table = '::'.join((args.ms, 'SPECTRAL_WINDOW'))

        for spw_ds in xds_from_table(spw_table, group_cols="__row__"):
            print(spw_ds)
            print(spw_ds.NUM_CHAN.values)
            print(spw_ds.CHAN_FREQ.values)

        # Create datasets from a partitioning of the MS, chunked by row
        datasets = list(xds_from_ms(args.ms, chunks={'row': args.chunks}))
Code example #4
            'ddid', "DATA_DESCRIPTION"), (
                'spw', "SPECTRAL_WINDOW"), ('pol',
                                            "POLARIZATION"), ('field',
                                                              "FIELD")]
    }

    with scheduler_context(args):
        # Get datasets from the main MS,
        # partitioned by FIELD_ID and DATA_DESC_ID
        # and sorted by TIME
        datasets = xds_from_ms(args.ms,
                               group_cols=("FIELD_ID", "DATA_DESC_ID"),
                               index_cols="TIME")

        # Get the antenna dataset; ungrouped, so exactly one is expected
        ant_ds = list(xds_from_table(table_name['antenna']))
        assert len(ant_ds) == 1
        # Relabel the generic 'row' dimension as 'antenna' and drop the
        # bookkeeping 'table_row' coordinate
        ant_ds = ant_ds[0].rename({'row': 'antenna'}).drop('table_row')

        # Get datasets for DATA_DESCRIPTION, SPECTRAL_WINDOW,
        # POLARIZATION and FIELD, partitioned by row
        ddid_ds = list(xds_from_table(table_name['ddid'],
                                      group_cols="__row__"))
        spwds = list(xds_from_table(table_name['spw'], group_cols="__row__"))
        pds = list(xds_from_table(table_name['pol'], group_cols="__row__"))
        field_ds = list(
            xds_from_table(table_name['field'], group_cols="__row__"))

        # For each partitioned dataset from the main MS,
        # assign additional arrays from the FIELD, SPECTRAL_WINDOW
        # and POLARISATION subtables