Example 1
    def test_zero_vis_online(self):
        """Check online pipeline exits gracefully if all data is flagged
        """
        # Create flagged Mock dataset and wrap it in a KatdalAdapter
        ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=self.spws,
                         dumps=self.scans,
                         flags=partial(flags, flagged=True))

        # Dummy CB_ID and product ID, plus a temporary FITS disk
        fd = kc.get_config()['fitsdirs']
        fd += [(None, '/tmp/FITS')]
        kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

        setup_aips_disks()

        # Create the pipeline
        pipeline = pipeline_factory('online', ds, TelescopeState(),
                                    katdal_select=self.select,
                                    uvblavg_params=self.uvblavg_params,
                                    mfimage_params=self.mfimage_params)

        metadata = pipeline.execute()
        # Check metadata is empty and no exceptions are thrown
        assert_equal(metadata, {})

        # Get fits area
        cfg = kc.get_config()
        fits_area = cfg['fitsdirs'][-1][1]

        # Remove the tmp/FITS dir
        shutil.rmtree(fits_area)
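
Here `partial(flags, flagged=True)` pre-flags every visibility so the pipeline has nothing to image. A minimal sketch of the wrapped `flags` helper, assuming MockDataSet invokes it with the dataset and expects a boolean array covering the full (ntime, nchan, ncorrprod) cube:

import numpy as np

def flags(dataset, flagged=False):
    # All-True when flagged=True, otherwise all-False
    return np.full(dataset.shape, flagged, dtype=np.bool_)
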
Example 2
    def test_new_online_pipeline(self):
        """
        Tests that a run of the online continuum pipeline executes.
        """
        # Create Mock dataset and wrap it in a KatdalAdapter
        ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=self.spws,
                         dumps=self.scans)

        # Sentinel object for attributes missing from the dataset
        FAKE = object()

        # Test that metadata agrees
        for k, v in DEFAULT_METADATA.items():
            self.assertEqual(v, getattr(ds, k, FAKE))

        # Dummy CB_ID and product ID, plus a temporary FITS disk
        fd = kc.get_config()['fitsdirs']
        fd += [(None, '/tmp/FITS')]
        kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

        setup_aips_disks()

        # Create the pipeline
        pipeline = pipeline_factory('online', ds, TelescopeState(),
                                    katdal_select=self.select,
                                    uvblavg_params=self.uvblavg_params,
                                    mfimage_params=self.mfimage_params)

        metadata = pipeline.execute()

        # Check that output FITS files exist and have the right names
        cfg = kc.get_config()
        cb_id = cfg['cb_id']
        out_id = cfg['output_id']
        fits_area = cfg['fitsdirs'][-1][1]

        for otarg in self.sanitised_target_names:
            out_strings = [cb_id, out_id, otarg, IMG_CLASS]
            filename = '_'.join(filter(None, out_strings)) + '.fits'
            assert_in(filename, metadata['FITSImageFilename'])
            filepath = os.path.join(fits_area, filename)
            assert os.path.isfile(filepath)
            _check_fits_headers(filepath)

        # Remove the tmp/FITS dir
        shutil.rmtree(fits_area)
Example 3
    def _source_info(self):
        """
        Infer MFImage source names and file outputs.
        For each source, MFImage outputs UV data and a CLEAN file.
        """

        target_indices = self.ka.target_indices[:]

        # Use output_id for labels
        label = kc.get_config()['output_id']

        # Source names
        uv_sources = [s["SOURCE"][0].strip() for s in self.ka.uv_source_rows]

        uv_files = [AIPSPath(name=s, disk=self.disk, label=label,
                    aclass=UV_CLASS, atype="UV")
                    for s in uv_sources]

        clean_files = [AIPSPath(name=s, disk=self.disk, label=label,
                       aclass=IMG_CLASS, atype="MA")
                       for s in uv_sources]

        # Find a single sequence number that is unassigned
        # in the catalogue for all UV and CLEAN files
        max_uv_seq_nr = max(next_seq_nr(f) for f in uv_files)
        max_clean_seq_nr = max(next_seq_nr(f) for f in clean_files)

        uv_files = [f.copy(seq=max_uv_seq_nr) for f in uv_files]
        clean_files = [f.copy(seq=max_clean_seq_nr) for f in clean_files]

        return uv_sources, target_indices, uv_files, clean_files
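
Taking the maximum over `next_seq_nr` yields one sequence number that is free for every output file, so the UV and CLEAN products share a common `seq`. A sketch of the semantics assumed of `next_seq_nr` (`existing_seq_nrs` is a hypothetical stand-in for the real catalogue lookup):

def existing_seq_nrs(aips_path):
    # Hypothetical stub: the real code would query the AIPS catalogue
    # for sequence numbers already used by this (name, class, disk)
    return []

def next_seq_nr(aips_path):
    seqs = existing_seq_nrs(aips_path)
    return max(seqs) + 1 if seqs else 1
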
Example 4
def link_obit_data():
    """
    Creates soft links to Obit data files within FITS directories
    """

    cfg = kc.get_config()
    # Directory in which Obit data files are located
    obit_data_glob = pjoin(cfg['obitroot'], 'ObitSystem', 'Obit', 'share',
                           'data', '*')
    # Data files we wish to symlink
    data_files = glob.glob(obit_data_glob)

    # Separate filename from full path
    filenames = [os.path.split(f)[1] for f in data_files]

    # In each FITS dir, create a link to each data file
    for url, fitsdir in cfg['fitsdirs']:
        # Fully expand link paths
        link_names = [pjoin(fitsdir, f) for f in filenames]

        # Remove any prior symlink and then symlink
        # (lexists also catches dangling symlinks)
        for data_file, link_name in zip(data_files, link_names):
            if os.path.lexists(link_name):
                os.remove(link_name)

            try:
                os.symlink(data_file, link_name)
            except OSError as e:
                log.warning("Unable to link '%s' to '%s'\n%s",
                            link_name, data_file, e)
Example 5
def rewrite_netsp():
    """
    Rewrite ``cfg.aips.aipsroot/DA00/NETSP`` to reference
    the AIPS directories specified in our configuration
    """
    cfg = kc.get_config()
    netsp = pjoin(cfg['da00'], 'NETSP')
    backup = pjoin(cfg['da00'], '.NETSP.BAK')

    if not os.path.exists(netsp):
        log.warning(
            "Could not find '%s' for modification. "
            "Check your AIPS directory root '%s'.", netsp, cfg['aipsroot'])
        return

    # Make a copy of the original
    shutil.copy(netsp, backup)

    # Read the copy, writing back to the original
    with open(backup, "r") as rf, open(netsp, "w") as wf:
        # Retain comments
        for line in rf:
            if line.startswith('#'):
                wf.write(line)

        # Write out AIPS Directory parameters
        for url, aipsdir in cfg['aipsdirs']:
            log.info("Adding AIPS Disk '%s to '%s'", aipsdir, netsp)
            wf.write(aipsdir +
                     ' 365.0    0    0    0    0    0    0    0    0\n')

    # Remove the copy
    os.remove(backup)
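
For reference, each disk entry written above uses the fixed NETSP column layout; with a hypothetical AIPS disk at /scratch/aipsdisk the rewritten file gains a line like:

/scratch/aipsdisk 365.0    0    0    0    0    0    0    0    0
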
Example 6
    def __init__(self):
        """
        Constructor

        Largely derived from
        https://github.com/bill-cotton/Obit/blob/master/ObitSystem/Obit/share/scripts/AIPSSetup.py
        """

        # Get the current configuration
        cfg = kc.get_config()

        self.err = err = OErr.OErr()
        self.obitsys = OSystem.OSystem("Pipeline", 1, cfg['userno'], 0, [" "],
                                       0, [" "], True, False, err)
        OErr.printErrMsg(err, "Error starting Obit System")

        # Setup AIPS userno
        AIPS.userno = cfg['userno']

        # Setup Obit Environment
        ObitTalkUtil.SetEnviron(AIPS_ROOT=cfg['aipsroot'],
                                AIPS_VERSION=cfg['aipsversion'],
                                OBIT_EXEC=cfg['obitexec'],
                                DA00=cfg['da00'],
                                ARCH="LINUX",
                                aipsdirs=cfg['aipsdirs'],
                                fitsdirs=cfg['fitsdirs'])
Example 7
def rewrite_dadevs():
    """
    Rewrite ``cfg.aips.aipsroot/DA00/DADEVS.LIST`` to reference
    the AIPS directories specified in our configuration
    """
    cfg = kc.get_config()
    dadevs_list = pjoin(cfg['da00'], 'DADEVS.LIST')
    backup = pjoin(cfg['da00'], '.DADEVS.LIST.BAK')

    if not os.path.exists(dadevs_list):
        log.warning(
            "Could not find '%s' for modification. "
            "Check your AIPS directory root '%s'.", dadevs_list,
            cfg['aipsroot'])

        return

    # Make a copy of the original
    shutil.copy(dadevs_list, backup)

    # Read the copy, writing back to the original
    with open(backup, "r") as rf, open(dadevs_list, "w") as wf:
        # Retain comments
        for line in rf:
            if line.startswith('#'):
                wf.write(line)

        # Write out AIPS directories
        for url, aipsdir in cfg['aipsdirs']:
            log.info("Adding AIPS Disk '%s' to '%s'", aipsdir, dadevs_list)
            wf.write("-  " + aipsdir + '\n')

    # Remove the copy
    os.remove(backup)
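
Correspondingly, each AIPS disk appears in the rewritten DADEVS.LIST as a '-' prefixed entry, e.g. for the same hypothetical disk:

-  /scratch/aipsdisk
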
Example 8
    def __init__(self, katdata, telstate, uvblavg_params={}, mfimage_params={},
                 katdal_select={}, nvispio=10240):
        """
        Initialise the Continuum Pipeline for MeerKAT system processing.

        Parameters
        ----------
        katdata : :class:`katdal.Dataset`
            katdal Dataset object
        telstate : :class:`katsdptelstate.TelescopeState`
            Telescope state or Telescope state view
        uvblavg_params : dict
            Dictionary of UV baseline averaging task parameters
        mfimage_params : dict
            Dictionary of MFImage task parameters
        katdal_select : dict
            Dictionary of katdal selection statements.
        nvispio : integer
            Number of AIPS visibilities per IO operation.
        """

        super(OnlinePipeline, self).__init__(katdata)
        self.telstate = telstate
        self.uvblavg_params.update(uvblavg_params)
        self.mfimage_params.update(mfimage_params)
        self.katdal_select = katdal_select
        self.nvispio = nvispio

        # Use highest numbered FITS disk for FITS output.
        self.odisk = len(kc.get_config()['fitsdirs'])
        self.prtlv = 2
        self.disk = 1
Example 9
    def execute_implementation(self):
        result_tuple = self._select_and_infer_files()
        uv_sources, target_indices, uv_files, clean_files = result_tuple
        if "mfimage" in self.clobber:
            self.cleanup_uv_files += uv_files
        if "clean" in self.clobber:
            self.cleanup_img_files += clean_files
        # Update MFImage source selection
        self.mfimage_params['Sources'] = uv_sources
        # Find the highest numbered merge file if we are reusing
        if self.reuse:
            uv_mp = self.ka.aips_path(aclass='merge', name=kc.get_config()['cb_id'])
            # Find the merge file with the highest seq #
            hiseq = next_seq_nr(uv_mp) - 1
            # hiseq will be zero if the aipsdisk has no 'merge' file
            if hiseq == 0:
                raise ValueError("AIPS disk at '%s' has no 'merge' file to reuse." %
                                 (kc.get_config()['aipsdirs'][self.disk - 1][-1]))
            else:
                # Get the AIPS entry of the UV data to reuse
                self.uv_merge_path = uv_mp.copy(seq=hiseq)
                log.info("Re-using UV data in '%s' from AIPS disk: '%s'",
                         self.uv_merge_path, kc.get_config()['aipsdirs'][self.disk - 1][-1])
                merge_uvf = uv_factory(aips_path=self.uv_merge_path, mode='r',
                                       nvispio=self.nvispio)

                merge_nvis = merge_uvf.nvis_from_NX()
        else:
            merge_nvis = self._export_and_merge_scans()
        if "merge" in self.clobber:
            self.cleanup_uv_files.append(self.uv_merge_path)
        log.info('There are %s visibilities in the merged file', merge_nvis)
        if merge_nvis < 1:
            return {}
        else:
            self._run_mfimage(self.uv_merge_path, uv_sources)

            self._get_wavg_img(clean_files)
            for uv, clean in zip(uv_files, clean_files):
                self._attach_SN_tables_to_image(uv, clean)

            metadata = export_images(clean_files, target_indices, self.odisk, self.ka)
            return metadata
Example 10
    def __init__(self, katdata, uvblavg_params={}, mfimage_params={},
                 katdal_select={}, nvispio=10240, prtlv=2,
                 clobber=set(['scans', 'avgscans']), time_step=20, reuse=False):
        """
        Initialise the Continuum Pipeline for offline imaging
        using a katdal dataset.

        Parameters
        ----------
        katdata : :class:`katdal.Dataset`
            katdal Dataset object
        uvblavg_params : dict
            Dictionary of UV baseline averaging task parameters
        mfimage_params : dict
            Dictionary of MFImage task parameters
        katdal_select : dict
            Dictionary of katdal selection statements.
        nvispio : integer
            Number of AIPS visibilities per IO operation.
        prtlv : integer
            Chattiness of Obit tasks
        clobber : set or iterable
            Set or iterable of output files to clobber from the aipsdisk.
            Possible values include:
            1. `'scans'`, UV data files containing observational
                data for individual scans.
            2. `'avgscans'`, UV data files containing time-dependent
                baseline data for individual scans.
            3. `'merge'`, UV data file containing merged averaged scans.
            4. `'clean'`, Output images from MFImage.
            5. `'mfimage'`, Output UV data file from MFImage.
        time_step : int
            Size of time chunks to read vis, weights & flags from katdata.
        reuse : bool
            Are we reusing a previous katdal export in the aipsdisk?
        """

        super(KatdalOfflinePipeline, self).__init__(katdata)
        # Copy so that later updates (e.g. MFImage 'Sources')
        # don't mutate the shared default dicts
        self.uvblavg_params = dict(uvblavg_params)
        self.mfimage_params = dict(mfimage_params)
        self.katdal_select = katdal_select
        self.nvispio = nvispio
        self.prtlv = prtlv
        self.clobber = clobber
        self.reuse = reuse
        self.time_step = time_step
        self.odisk = len(kc.get_config()['fitsdirs'])
Example 11
def setup_aips_disks():
    """
    Ensure that each AIPS and FITS disk (directory) exists.
    Creates a SPACE file within the aips disks.
    """
    cfg = kc.get_config()
    for url, aipsdir in cfg['aipsdirs']:
        # Create directory if it doesn't exist
        if not os.path.exists(aipsdir):
            log.info("Creating AIPS Disk '%s'", aipsdir)
            os.makedirs(aipsdir)
        # Create SPACE file
        space = os.path.join(aipsdir, 'SPACE')

        with open(space, 'a'):
            os.utime(space, None)

    for url, fitsdir in cfg['fitsdirs']:
        # Create directory if it doesn't exist
        if not os.path.exists(fitsdir):
            log.info("Creating FITS Disk '%s'", fitsdir)
            os.makedirs(fitsdir)
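
The append-mode open plus `os.utime` above is the portable 'touch' idiom; an equivalent sketch with pathlib (disk path hypothetical):

from pathlib import Path

aipsdir = '/scratch/aipsdisk'                     # hypothetical disk path
space = Path(aipsdir, 'SPACE')
space.parent.mkdir(parents=True, exist_ok=True)   # like os.makedirs above
space.touch()                                     # create or update mtime
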
Example 12
def main():
    parser = create_parser()
    args = parser.parse_args()
    configure_logging(args)
    log.info("Reading data with applycal=%s", args.applycal)
    katdata = katdal.open(args.katdata,
                          applycal=args.applycal,
                          **args.open_args)

    # Apply the supplied mask to the flags
    if args.mask:
        apply_user_mask(katdata, args.mask)

    # Set up katdal selection based on arguments
    kat_select = {'pol': args.pols, 'nif': args.nif}

    if args.targets:
        kat_select['targets'] = args.targets
    if args.channels:
        start_chan, end_chan = args.channels
        kat_select['channels'] = slice(start_chan, end_chan)

    # An explicit katdal selection (args.select) overrides the options above
    kat_select = recursive_merge(args.select, kat_select)

    # Get band and determine default .yaml files
    band = katdata.spectral_windows[katdata.spw].band
    uvblavg_parm_file = args.uvblavg_config
    if not uvblavg_parm_file:
        uvblavg_parm_file = os.path.join(os.sep, "obitconf",
                                         f"uvblavg_{band}.yaml")
    log.info('UVBlAvg parameter file for %s-band: %s', band, uvblavg_parm_file)
    mfimage_parm_file = args.mfimage_config
    if not mfimage_parm_file:
        mfimage_parm_file = os.path.join(os.sep, "obitconf",
                                         f"mfimage_{band}.yaml")
    log.info('MFImage parameter file for %s-band: %s', band, mfimage_parm_file)

    # Get defaults for uvblavg and mfimage and merge user supplied ones
    uvblavg_args = get_and_merge_args(uvblavg_parm_file, args.uvblavg)
    mfimage_args = get_and_merge_args(mfimage_parm_file, args.mfimage)

    # Grab the cal refant from the katdal dataset and default to
    # it if it is available and hasn't been set by the user.
    ts = katdata.source.telstate
    refant = ts.get('cal_refant')
    if refant is not None and 'refAnt' not in mfimage_args:
        mfimage_args['refAnt'] = aips_ant_nr(refant)

    # Try to average down to 1024 channels unless the user
    # has specified something else
    num_chans = len(katdata.channels)
    factor = num_chans // 1024
    if 'avgFreq' not in uvblavg_args:
        if factor > 1:
            uvblavg_args['avgFreq'] = 1
            uvblavg_args['chAvg'] = factor

    # Get the default config.
    dc = kc.get_config()

    # capture_block_id is used to generate AIPS disk filenames
    capture_block_id = katdata.obs_params['capture_block_id']

    if args.reuse:
        # Set up AIPS disk from specified directory
        if os.path.exists(args.reuse):
            aipsdirs = [(None, args.reuse)]
            log.info('Re-using AIPS data area: %s', aipsdirs[0][1])
            reuse = True
        else:
            msg = "AIPS disk at '%s' does not exist." % (args.reuse)
            log.exception(msg)
            raise IOError(msg)
    else:
        # Set up aipsdisk configuration from args.workdir
        aipsdirs = [(None,
                     os.path.join(args.workdir,
                                  capture_block_id + '_aipsdisk'))]
        log.info('Using AIPS data area: %s', aipsdirs[0][1])
        reuse = False

    # Set up output configuration from args.outputdir
    fitsdirs = dc['fitsdirs']

    # Append outputdir to fitsdirs
    fitsdirs += [(None, args.outputdir)]
    log.info('Using output data area: %s', args.outputdir)

    kc.set_config(aipsdirs=aipsdirs,
                  fitsdirs=fitsdirs,
                  output_id='',
                  cb_id=capture_block_id)

    setup_aips_disks()

    pipeline = pipeline_factory('offline',
                                katdata,
                                katdal_select=kat_select,
                                uvblavg_params=uvblavg_args,
                                mfimage_params=mfimage_args,
                                nvispio=args.nvispio,
                                clobber=args.clobber,
                                prtlv=args.prtlv,
                                reuse=reuse)

    # Execute it
    pipeline.execute()
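
As the comment above notes, `recursive_merge(args.select, kat_select)` lets the explicit selection win. A minimal sketch consistent with that usage (the real implementation may differ):

def recursive_merge(src, dst):
    # Merge src into dst in place: src wins on conflicts and
    # nested dicts are merged key-by-key rather than replaced
    for key, value in src.items():
        if isinstance(value, dict) and isinstance(dst.get(key), dict):
            recursive_merge(value, dst[key])
        else:
            dst[key] = value
    return dst
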
Example 13
def main():
    setup_logging()
    parser = create_parser()
    args = parser.parse_args()

    # Open the observation
    if (args.access_key is not None) != (args.secret_key is not None):
        parser.error('--access-key and --secret-key must be used together')
    if args.access_key is not None and args.token is not None:
        parser.error('--access-key/--secret-key cannot be used with --token')
    open_kwargs = {}
    if args.access_key is not None:
        open_kwargs['credentials'] = (args.access_key, args.secret_key)
    elif args.token is not None:
        open_kwargs['token'] = args.token
    katdata = katdal.open(args.katdata, applycal='l1', **open_kwargs)

    post_process_args(args, katdata)

    uvblavg_args, mfimage_args, band = _infer_defaults_from_katdal(katdata)

    # Get config defaults for uvblavg and mfimage and merge user supplied ones
    uvblavg_parm_file = pjoin(CONFIG, f'uvblavg_MKAT_{band}.yaml')
    log.info('UVBlAvg parameter file for %s-band: %s', band, uvblavg_parm_file)
    mfimage_parm_file = pjoin(CONFIG, f'mfimage_MKAT_{band}.yaml')
    log.info('MFImage parameter file for %s-band: %s', band, mfimage_parm_file)

    user_uvblavg_args = get_and_merge_args(uvblavg_parm_file, args.uvblavg)
    user_mfimage_args = get_and_merge_args(mfimage_parm_file, args.mfimage)

    # Merge katdal defaults with user supplied defaults
    recursive_merge(user_uvblavg_args, uvblavg_args)
    recursive_merge(user_mfimage_args, mfimage_args)

    # Get the default config.
    dc = kc.get_config()
    # Set up aipsdisk configuration from args.workdir
    if args.workdir is not None:
        aipsdirs = [(None,
                     pjoin(args.workdir, args.capture_block_id + '_aipsdisk'))]
    else:
        aipsdirs = dc['aipsdirs']
    log.info('Using AIPS data area: %s', aipsdirs[0][1])

    # Set up output configuration from args.outputdir
    fitsdirs = dc['fitsdirs']

    outputname = args.capture_block_id + OUTDIR_SEPARATOR + args.telstate_id + \
        OUTDIR_SEPARATOR + START_TIME

    outputdir = pjoin(args.outputdir, outputname)
    # Set writing tag for duration of the pipeline
    work_outputdir = outputdir + WRITE_TAG
    # Append outputdir to fitsdirs
    # NOTE: Pipeline is set up to always place its output in the
    # highest numbered fits disk so we ensure that is the case
    # here.
    fitsdirs += [(None, work_outputdir)]
    log.info('Using output data area: %s', outputdir)

    kc.set_config(aipsdirs=aipsdirs, fitsdirs=fitsdirs)

    setup_aips_disks()

    # Add output_id and capture_block_id to configuration
    kc.set_config(cfg=kc.get_config(),
                  output_id=args.output_id,
                  cb_id=args.capture_block_id)

    # Set up the telstate link, then create a view
    # based on the capture block ID and telstate ID
    telstate = TelescopeState(args.telstate)
    view = telstate.join(args.capture_block_id, args.telstate_id)
    ts_view = telstate.view(view)

    katdal_select = args.select
    katdal_select['nif'] = args.nif

    # Create Continuum Pipeline
    pipeline = pipeline_factory('online',
                                katdata,
                                ts_view,
                                katdal_select=katdal_select,
                                uvblavg_params=uvblavg_args,
                                mfimage_params=mfimage_args,
                                nvispio=args.nvispio)

    # Execute it
    metadata = pipeline.execute()

    # Create QA products if images were created
    if metadata:
        make_pbeam_images(metadata, outputdir, WRITE_TAG)
        make_qa_report(metadata, outputdir, WRITE_TAG)
        organise_qa_output(metadata, outputdir, WRITE_TAG)

        # Remove the writing tag from the output directory
        os.rename(work_outputdir, outputdir)
    else:
        os.rmdir(work_outputdir)
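
The WRITE_TAG handling above is an in-progress-marker pattern: all output lands in a tagged directory which is only renamed to its final name once the pipeline and QA steps succeed. A standalone sketch of the same pattern, with a hypothetical tag value:

import os

WRITE_TAG = '.writing'  # hypothetical value, for illustration

def publish_output(outputdir, produced_output):
    work_dir = outputdir + WRITE_TAG
    if produced_output:
        # Atomic on POSIX filesystems: readers never see a half-written dir
        os.rename(work_dir, outputdir)
    else:
        # Nothing was written, so the tagged directory is empty
        os.rmdir(work_dir)
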
Example 14
    def _export_and_merge_scans(self):
        """
        1. Read scans from katdal
        2. Export scan data to an AIPS UV file
        3. Baseline average the file.
        4. Merge averaged AIPS UV file into a merge UV file.
        """

        # The merged UV observation file. Its creation is deferred
        # until we have a baseline-averaged file from which to
        # configure it
        merge_uvf = None

        uv_mp = self.ka.aips_path(aclass='merge', name=kc.get_config()['cb_id'])
        self.uv_merge_path = uv_mp.copy(seq=next_seq_nr(uv_mp))

        global_desc = self.ka.uv_descriptor()
        global_table_cmds = self.ka.default_table_cmds()

        # FORTRAN indexing
        merge_firstVis = 1

        # Scan indices
        scan_indices = [int(si) for si in self.ka.scan_indices]

        merge_blavg_nvis = 0
        # Export each scan individually, baseline averaging and merging it
        # into the final observation file.
        # NOTE: Loop over scan indices here rather than using the ka.scans
        # generator to avoid a conflict with the loop over ka.scans in uv_export.
        for si in scan_indices:
            # Select the current scan
            self.ka.select(scans=si)
            # Get path, with sequence based on scan index
            scan_path = self.uv_merge_path.copy(aclass='raw', seq=int(si))
            # Get the AIPS source for logging purposes
            aips_source = self.ka.catalogue[self.ka.target_indices[0]]
            aips_source_name = aips_source["SOURCE"][0].strip()

            log.info("Creating '%s'", scan_path)

            # Create a UV file for the scan and export to it
            with uv_factory(aips_path=scan_path, mode="w",
                            nvispio=self.nvispio,
                            table_cmds=global_table_cmds,
                            desc=global_desc) as uvf:

                uv_export(self.ka, uvf, time_step=self.time_step)

            # Retrieve the single scan index.
            # The time centroids and interval should be correct
            # but the visibility indices need to be repurposed
            scan_uvf = uv_factory(aips_path=scan_path, mode='r',
                                  nvispio=self.nvispio)

            assert len(scan_uvf.tables["AIPS NX"].rows) == 1
            nx_row = scan_uvf.tables["AIPS NX"].rows[0].copy()
            scan_nvis = scan_uvf.nvis_from_NX()

            # If we should be merging scans
            # just use the existing scan path and file
            if self.merge_scans:
                blavg_path = scan_path
                blavg_uvf = scan_uvf
            # Otherwise perform baseline averaging, deriving
            # a new scan path and file
            else:
                # Perform baseline averaging
                blavg_path = self._blavg_scan(scan_path)
                blavg_uvf = uv_factory(aips_path=blavg_path,
                                       mode='r',
                                       nvispio=self.nvispio)

            # Create the merge UV file, if necessary
            merge_uvf = self._maybe_create_merge_uvf(merge_uvf, blavg_uvf,
                                                     global_table_cmds)

            blavg_nvis = blavg_uvf.nvis_from_NX()
            merge_blavg_nvis += blavg_nvis

            # Record something about the baseline averaging process
            param_str = ', '.join("%s=%s" % (k, v)
                                  for k, v
                                  in self.uvblavg_params.items())

            blavg_history = ("Scan %d '%s' averaged "
                             "%s to %s visibilities. UVBlAvg(%s)" %
                             (si, aips_source_name, scan_nvis,
                              blavg_nvis, param_str))

            log.info(blavg_history)

            merge_uvf.append_history(blavg_history)
            if blavg_nvis > 0:
                log.info("Merging '%s' into '%s'", blavg_path, self.uv_merge_path)
                merge_firstVis = self._copy_scan_to_merge(merge_firstVis,
                                                          merge_uvf, blavg_uvf,
                                                          nx_row)
            else:
                log.warning("No visibilities to merge for scan %d", si)

            # Remove scan once merged
            if 'scans' in self.clobber:
                log.info("Zapping '%s'", scan_uvf.aips_path)
                scan_uvf.Zap()
            else:
                scan_uvf.Close()

            # If merging scans for testing purposes, our
            # baseline averaged file will be the same as the
            # scan file, which was handled above, so don't
            # delete again. Otherwise default to
            # normal clobber handling.
            if not self.merge_scans:
                if 'avgscans' in self.clobber:
                    log.info("Zapping '%s'", blavg_uvf.aips_path)
                    blavg_uvf.Zap()
                else:
                    blavg_uvf.Close()

        if merge_blavg_nvis == 0:
            log.error("Final merged file '%s' has ZERO averaged visibilities",
                      self.uv_merge_path)
        # Write the index table
        merge_uvf.tables["AIPS NX"].write()

        # Create an empty calibration table
        merge_uvf.attach_CL_from_NX_table(self.ka.max_antenna_number)

        # Close merge file
        merge_uvf.close()

        return merge_blavg_nvis
Example 15
    def test_gains_export(self):
        """Check l2 export to telstate"""
        nchan = 128
        nif = 4
        dump_period = 1.0
        centre_freq = 1200.e6
        bandwidth = 100.e6
        solPint = dump_period / 2.
        solAint = dump_period
        AP_telstate = 'product_GAMP_PHASE'
        P_telstate = 'product_GPHASE'

        spws = [{'centre_freq': centre_freq,
                 'num_chans': nchan,
                 'channel_width': bandwidth / nchan,
                 'sideband': 1,
                 'band': 'L'}]
        ka_select = {'pol': 'HH,VV', 'scans': 'track',
                     'corrprods': 'cross', 'nif': nif}
        uvblavg_params = {'maxFact': 1.0, 'avgFreq': 0,
                          'FOV': 100.0, 'maxInt': 1.e-6}
        mfimage_params = {'Niter': 50, 'FOV': 0.1,
                          'xCells': 5., 'yCells': 5.,
                          'doGPU': False, 'Robust': -1.5,
                          'minFluxPSC': 0.1, 'solPInt': solPint / 60.,
                          'solPMode': 'P', 'minFluxASC': 0.1,
                          'solAInt': solAint / 60., 'maxFBW': 0.02}

        # Simulate a '10Jy' source at the phase center
        cat = katpoint.Catalogue()
        cat.add(katpoint.Target(
            "Alberich lord of the Nibelungs, radec, 20.0, -30.0, (856. 1712. 1. 0. 0.)"))

        telstate = TelescopeState()

        # Set up a scratch space in /tmp
        fd = kc.get_config()['fitsdirs']
        fd += [(None, '/tmp/FITS')]
        kc.set_config(cb_id='CBID', fitsdirs=fd)
        setup_aips_disks()

        scan = [('track', 4, cat.targets[0])]

        # Construct a simulated dataset with our
        # point source at the centre of the field
        ds = MockDataSet(timestamps={'start_time': 0.0, 'dump_period': dump_period},
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=spws,
                         dumps=scan,
                         vis=partial(vis, sources=cat),
                         weights=weights,
                         flags=flags)

        # Try one round of phase only self-cal & Amp+Phase self-cal
        mfimage_params['maxPSCLoop'] = 1
        mfimage_params['maxASCLoop'] = 1

        # Run the pipeline
        pipeline = pipeline_factory('online', ds, telstate, katdal_select=ka_select,
                                    uvblavg_params=uvblavg_params,
                                    mfimage_params=mfimage_params)
        pipeline.execute()

        ts = telstate.view('selfcal')
        # Check what we have in telstate agrees with what we put in
        self.assertEqual(len(ts['antlist']), len(ANTENNA_DESCRIPTIONS))
        self.assertEqual(ts['bandwidth'], bandwidth)
        self.assertEqual(ts['n_chans'], nif)
        pol_ordering = [pol[0] for pol in sorted(CORR_ID_MAP, key=CORR_ID_MAP.get)
                        if pol[0] == pol[1]]
        self.assertEqual(ts['pol_ordering'], pol_ordering)
        if_width = bandwidth / nif
        center_if = nif // 2
        start_freq = centre_freq - (bandwidth / 2.)
        self.assertEqual(ts['center_freq'], start_freq + if_width * (center_if + 0.5))

        self.assertIn(ts.join('selfcal', P_telstate), ts.keys())
        self.assertIn(ts.join('selfcal', AP_telstate), ts.keys())

        def check_gains_timestamps(gains, expect_timestamps):
            timestamps = []
            for gain, timestamp in gains:
                np.testing.assert_array_almost_equal(np.abs(gain), 1.0, decimal=3)
                np.testing.assert_array_almost_equal(np.angle(gain), 0.0)
                timestamps.append(timestamp)
            np.testing.assert_array_almost_equal(timestamps, expect_timestamps, decimal=1)

        # Check phase-only gains and timestamps
        P_times = np.arange(solPint, ds.end_time.secs, 2. * solPint)
        check_gains_timestamps(ts.get_range(P_telstate, st=0), P_times)
        # Check Amp+Phase gains
        AP_times = np.arange(solAint, ds.end_time.secs, 2. * solAint)
        check_gains_timestamps(ts.get_range(AP_telstate, st=0), AP_times)

        # Check with no Amp+Phase self-cal
        mfimage_params['maxASCLoop'] = 0
        telstate.clear()
        pipeline = pipeline_factory('online', ds, telstate, katdal_select=ka_select,
                                    uvblavg_params=uvblavg_params,
                                    mfimage_params=mfimage_params)
        pipeline.execute()
        self.assertIn(telstate.join('selfcal', P_telstate), ts.keys())
        self.assertNotIn(telstate.join('selfcal', AP_telstate), ts.keys())

        # Check with no self-cal
        mfimage_params['maxPSCLoop'] = 0
        telstate.clear()
        pipeline = pipeline_factory('online', ds, telstate, katdal_select=ka_select,
                                    uvblavg_params=uvblavg_params,
                                    mfimage_params=mfimage_params)
        pipeline.execute()
        self.assertNotIn(telstate.join('selfcal', P_telstate), ts.keys())
        self.assertNotIn(telstate.join('selfcal', AP_telstate), ts.keys())

        # Cleanup workspace
        shutil.rmtree(fd[-1][1])
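
The expected timestamps above assume Obit reports each solution at the midpoint of its solution interval, i.e. at solPInt, 3*solPInt, 5*solPInt and so on. A quick check of that arithmetic for the 4-dump track used here (assuming ds.end_time.secs comes to the 4 s track length):

import numpy as np

dump_period = 1.0
solPint = dump_period / 2.
track_secs = 4 * dump_period                          # four 1 s dumps
print(np.arange(solPint, track_secs, 2. * solPint))   # [0.5 1.5 2.5 3.5]
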
Example 16
    def test_cc_export(self):
        """Check CC models returned by MFImage
        """
        nchan = 128

        spws = [{'centre_freq': .856e9 + .856e9 / 2.,
                 'num_chans': nchan,
                 'channel_width': .856e9 / nchan,
                 'sideband': 1,
                 'band': 'L'}]

        katdal_select = {'pol': 'HH,VV', 'scans': 'track',
                         'corrprods': 'cross'}
        uvblavg_params = {'FOV': 0.2, 'avgFreq': 0,
                          'chAvg': 1, 'maxInt': 2.0}

        cat = katpoint.Catalogue()
        cat.add(katpoint.Target("Amfortas, radec, 0.0, -90.0, (856. 1712. 1. 0. 0.)"))
        cat.add(katpoint.Target("Klingsor, radec, 0.0, 0.0, (856. 1712. 2. -0.7 0.1)"))
        cat.add(katpoint.Target("Kundry, radec, 100.0, -35.0, (856. 1712. -1.0 1. -0.1)"))

        ts = TelescopeState()

        # Set up a scratch space in /tmp
        fd = kc.get_config()['fitsdirs']
        fd += [(None, '/tmp/FITS')]
        kc.set_config(cb_id='CBID', fitsdirs=fd)

        setup_aips_disks()

        # Point sources with various flux models
        for targ in cat:
            scans = [('track', 5, targ)]
            ds = MockDataSet(timestamps={'start_time': 1.0, 'dump_period': 4.0},
                             subarrays=DEFAULT_SUBARRAYS,
                             spws=spws,
                             dumps=scans,
                             vis=partial(vis, sources=[targ]),
                             weights=weights,
                             flags=flags)

            # 100 clean components
            mfimage_params = {'Niter': 100, 'maxFBW': 0.05,
                              'FOV': 0.1, 'xCells': 5.,
                              'yCells': 5., 'doGPU': False}

            pipeline = pipeline_factory('online', ds, ts, katdal_select=katdal_select,
                                        uvblavg_params=uvblavg_params,
                                        mfimage_params=mfimage_params)
            pipeline.execute()

            # Get the fitted CCs from telstate
            fit_cc = ts.get('target0_clean_components')
            ts.delete('target0_clean_components')

            all_ccs = katpoint.Catalogue(fit_cc['components'])
            # Should have one merged and fitted component
            self.assertEqual(len(all_ccs), 1)

            cc = all_ccs.targets[0]
            out_fluxmodel = cc.flux_model
            in_fluxmodel = targ.flux_model

            # Check the flux densities of the flux model in the fitted CC's
            test_freqs = np.linspace(out_fluxmodel.min_freq_MHz, out_fluxmodel.max_freq_MHz, 5)
            in_flux = in_fluxmodel.flux_density(test_freqs)
            out_flux = out_fluxmodel.flux_density(test_freqs)
            np.testing.assert_allclose(out_flux, in_flux, rtol=1.e-3)

        # A field with some off axis sources to check positions
        offax_cat = katpoint.Catalogue()
        offax_cat.add(katpoint.Target("Titurel, radec, 100.1, -35.05, (856. 1712. 1.1 0. 0.)"))
        offax_cat.add(katpoint.Target("Gurmenanz, radec, 99.9, -34.95, (856. 1712. 1. 0. 0.)"))

        scans = [('track', 5, cat.targets[2])]
        ds = MockDataSet(timestamps={'start_time': 1.0, 'dump_period': 4.0},
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=spws,
                         dumps=scans,
                         vis=partial(vis, sources=offax_cat),
                         weights=weights,
                         flags=flags)

        # Small number of CC's and high gain (not checking flux model)
        mfimage_params['Niter'] = 4
        mfimage_params['FOV'] = 0.2
        mfimage_params['Gain'] = 0.5
        mfimage_params['Robust'] = -5

        pipeline = pipeline_factory('online', ds, ts, katdal_select=katdal_select,
                                    uvblavg_params=uvblavg_params,
                                    mfimage_params=mfimage_params)
        pipeline.execute()
        fit_cc = ts.get('target0_clean_components')
        ts.delete('target0_clean_components')
        all_ccs = katpoint.Catalogue(fit_cc['components'])
        # We should have 2 merged clean components for two source positions
        self.assertEqual(len(all_ccs), 2)

        # Check the positions of the clean components
        # These will be ordered by decreasing flux density of the inputs
        # Position should be accurate to within a 5" pixel
        delta_dec = np.deg2rad(5./3600.)
        for model, cc in zip(offax_cat.targets, all_ccs.targets):
            delta_ra = delta_dec/np.cos(model.radec()[1])
            self.assertAlmostEqual(cc.radec()[0], model.radec()[0], delta=delta_ra)
            self.assertAlmostEqual(cc.radec()[1], model.radec()[1], delta=delta_dec)

        # Empty the scratch space
        shutil.rmtree(fd[-1][1])
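
The RA tolerance above is widened by 1/cos(dec) because a fixed angular offset on the sky spans a larger RA coordinate difference away from the celestial equator. Numerically, for a field near -35 degrees:

import numpy as np

delta_dec = np.deg2rad(5. / 3600.)     # one 5 arcsec pixel
dec = np.deg2rad(-35.05)               # Titurel's declination
delta_ra = delta_dec / np.cos(dec)
print(np.rad2deg(delta_ra) * 3600.)    # ~6.1 arcsec
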
Example 17
    def test_offline_pipeline(self):
        """
        Tests that a run of the offline continuum pipeline executes.
        """

        # Create Mock dataset and wrap it in a KatdalAdapter
        ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=self.spws,
                         dumps=self.scans)

        # Dummy CB_ID and product ID, plus temporary FITS and AIPS disks
        fd = kc.get_config()['fitsdirs']
        fd += [(None, os.path.join(os.sep, 'tmp', 'FITS'))]
        kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

        setup_aips_disks()

        # Create and run the pipeline
        pipeline = pipeline_factory('offline', ds,
                                    katdal_select=self.select,
                                    uvblavg_params=self.uvblavg_params,
                                    mfimage_params=self.mfimage_params,
                                    clobber=CLOBBER.difference({'merge'}))

        pipeline.execute()

        # Check that output FITS files exist and have the right names
        cfg = kc.get_config()
        cb_id = cfg['cb_id']
        out_id = cfg['output_id']
        fits_area = cfg['fitsdirs'][-1][1]

        out_strings = [cb_id, out_id, self.target_name, IMG_CLASS]
        filename = '_'.join(filter(None, out_strings)) + '.fits'
        filepath = os.path.join(fits_area, filename)
        assert os.path.isfile(filepath)
        _check_fits_headers(filepath)

        # Remove the tmp/FITS dir
        shutil.rmtree(fits_area)

        ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=self.spws,
                         dumps=self.scans)

        setup_aips_disks()

        # Create and run the pipeline (Reusing the previous data)
        pipeline = pipeline_factory('offline', ds,
                                    katdal_select=self.select,
                                    uvblavg_params=self.uvblavg_params,
                                    mfimage_params=self.mfimage_params,
                                    reuse=True,
                                    clobber=CLOBBER)

        metadata = pipeline.execute()
        assert_in(filename, metadata['FITSImageFilename'])
        assert os.path.isfile(filepath)
        _check_fits_headers(filepath)

        # Remove FITS temporary area
        shutil.rmtree(fits_area)