def test_zero_vis_online(self):
    """Check that the online pipeline exits gracefully if all data is flagged."""
    # Create a fully flagged mock dataset and wrap it in a KatdalAdapter
    ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=self.spws,
                     dumps=self.scans,
                     flags=partial(flags, flagged=True))

    # Dummy CB_ID and Product ID and temp fits disk
    fd = kc.get_config()['fitsdirs']
    fd += [(None, '/tmp/FITS')]
    kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

    setup_aips_disks()

    # Create the pipeline
    pipeline = pipeline_factory('online', ds, TelescopeState(),
                                katdal_select=self.select,
                                uvblavg_params=self.uvblavg_params,
                                mfimage_params=self.mfimage_params)

    metadata = pipeline.execute()
    # Check that the metadata is empty and no exceptions were thrown
    assert_equal(metadata, {})

    # Get the fits area
    cfg = kc.get_config()
    fits_area = cfg['fitsdirs'][-1][1]

    # Remove the tmp/FITS dir
    shutil.rmtree(fits_area)
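# The `flags` callback handed to MockDataSet above (via functools.partial) is
# assumed to produce a boolean flag array matching the visibility shape, with
# flagged=True marking every sample as bad. A minimal sketch of such a test
# fixture; the signature of the real helper may differ:
import numpy as np

def _flags_sketch(dataset, flagged=False):
    """Return a boolean flag array covering every visibility sample."""
    # dataset.shape is assumed to be (ndumps, nchannels, ncorrproducts)
    return np.full(dataset.shape, flagged, dtype=np.bool_)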
def test_new_online_pipeline(self):
    """Tests that a run of the online continuum pipeline executes."""
    # Create a mock dataset and wrap it in a KatdalAdapter
    ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=self.spws,
                     dumps=self.scans)

    # Sentinel object returned by getattr when an attribute is missing
    FAKE = object()

    # Test that the metadata agrees
    for k, v in DEFAULT_METADATA.items():
        self.assertEqual(v, getattr(ds, k, FAKE))

    # Dummy CB_ID and Product ID and temp fits disk
    fd = kc.get_config()['fitsdirs']
    fd += [(None, '/tmp/FITS')]
    kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

    setup_aips_disks()

    # Create the pipeline
    pipeline = pipeline_factory('online', ds, TelescopeState(),
                                katdal_select=self.select,
                                uvblavg_params=self.uvblavg_params,
                                mfimage_params=self.mfimage_params)

    metadata = pipeline.execute()

    # Check that the output FITS files exist and have the right names
    cfg = kc.get_config()
    cb_id = cfg['cb_id']
    out_id = cfg['output_id']
    fits_area = cfg['fitsdirs'][-1][1]

    for otarg in self.sanitised_target_names:
        out_strings = [cb_id, out_id, otarg, IMG_CLASS]
        filename = '_'.join(filter(None, out_strings)) + '.fits'
        assert_in(filename, metadata['FITSImageFilename'])
        filepath = os.path.join(fits_area, filename)
        assert os.path.isfile(filepath)
        _check_fits_headers(filepath)

    # Remove the tmp/FITS dir
    shutil.rmtree(fits_area)
def create_parser():
    formatter_class = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(formatter_class=formatter_class)
    parser.add_argument("-a", "--aipsdisks", default=None,
                        type=lambda s: [(None, ds.strip()) for ds in s.split(',')],
                        help="Comma separated list of paths to aipsdisks.")
    parser.add_argument("-f", "--fitsdisks", default=None,
                        type=lambda s: [(None, ds.strip()) for ds in s.split(',')],
                        help="Comma separated list of paths to fitsdisks.")
    return parser


setup_logging()
args = create_parser().parse_args()
kc.set_config(aipsdirs=args.aipsdisks, fitsdirs=args.fitsdisks)
setup_aips_disks()
rewrite_dadevs()
rewrite_netsp()
link_obit_data()
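# Example invocation of this disk-setup script (the script name and paths
# below are hypothetical; actual mount points depend on the deployment):
#
#   python setup_disks.py \
#       -a /scratch/aipsdisk1,/scratch/aipsdisk2 \
#       -f /scratch/fits
#
# Each comma-separated path is parsed into a (url, dir) tuple with url=None,
# i.e. a plain local disk, matching the format kc.set_config expects above.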
def main():
    setup_logging()

    parser = create_parser()
    args = parser.parse_args()

    # Open the observation
    if (args.access_key is not None) != (args.secret_key is not None):
        parser.error('--access-key and --secret-key must be used together')
    if args.access_key is not None and args.token is not None:
        parser.error('--access-key/--secret-key cannot be used with --token')
    open_kwargs = {}
    if args.access_key is not None:
        open_kwargs['credentials'] = (args.access_key, args.secret_key)
    elif args.token is not None:
        open_kwargs['token'] = args.token

    katdata = katdal.open(args.katdata, applycal='l1', **open_kwargs)

    post_process_args(args, katdata)

    uvblavg_args, mfimage_args, band = _infer_defaults_from_katdal(katdata)

    # Get config defaults for uvblavg and mfimage and merge user supplied ones
    uvblavg_parm_file = pjoin(CONFIG, f'uvblavg_MKAT_{band}.yaml')
    log.info('UVBlAvg parameter file for %s-band: %s', band, uvblavg_parm_file)
    mfimage_parm_file = pjoin(CONFIG, f'mfimage_MKAT_{band}.yaml')
    log.info('MFImage parameter file for %s-band: %s', band, mfimage_parm_file)

    user_uvblavg_args = get_and_merge_args(uvblavg_parm_file, args.uvblavg)
    user_mfimage_args = get_and_merge_args(mfimage_parm_file, args.mfimage)

    # Merge katdal defaults with user supplied defaults
    recursive_merge(user_uvblavg_args, uvblavg_args)
    recursive_merge(user_mfimage_args, mfimage_args)

    # Get the default config.
    dc = kc.get_config()

    # Set up aipsdisk configuration from args.workdir
    if args.workdir is not None:
        aipsdirs = [(None, pjoin(args.workdir,
                                 args.capture_block_id + '_aipsdisk'))]
    else:
        aipsdirs = dc['aipsdirs']
    log.info('Using AIPS data area: %s', aipsdirs[0][1])

    # Set up output configuration from args.outputdir
    fitsdirs = dc['fitsdirs']
    outputname = args.capture_block_id + OUTDIR_SEPARATOR + \
        args.telstate_id + OUTDIR_SEPARATOR + START_TIME
    outputdir = pjoin(args.outputdir, outputname)
    # Set the writing tag for the duration of the pipeline
    work_outputdir = outputdir + WRITE_TAG
    # Append outputdir to fitsdirs
    # NOTE: The pipeline is set up to always place its output in the
    # highest numbered fits disk, so we ensure that is the case here.
    fitsdirs += [(None, work_outputdir)]
    log.info('Using output data area: %s', outputdir)

    kc.set_config(aipsdirs=aipsdirs, fitsdirs=fitsdirs)
    setup_aips_disks()

    # Add output_id and capture_block_id to the configuration
    kc.set_config(cfg=kc.get_config(),
                  output_id=args.output_id, cb_id=args.capture_block_id)

    # Set up the telstate link, then create a view based on
    # the capture block ID and output ID
    telstate = TelescopeState(args.telstate)
    view = telstate.join(args.capture_block_id, args.telstate_id)
    ts_view = telstate.view(view)

    katdal_select = args.select
    katdal_select['nif'] = args.nif

    # Create the continuum pipeline
    pipeline = pipeline_factory('online', katdata, ts_view,
                                katdal_select=katdal_select,
                                uvblavg_params=uvblavg_args,
                                mfimage_params=mfimage_args,
                                nvispio=args.nvispio)

    # Execute it
    metadata = pipeline.execute()

    # Create QA products if images were created
    if metadata:
        make_pbeam_images(metadata, outputdir, WRITE_TAG)
        make_qa_report(metadata, outputdir, WRITE_TAG)
        organise_qa_output(metadata, outputdir, WRITE_TAG)
        # Remove the writing tag from the output directory
        os.rename(work_outputdir, outputdir)
    else:
        os.rmdir(work_outputdir)
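# recursive_merge is used above to fold user-supplied parameters into the
# katdal-inferred defaults. A minimal sketch of the assumed behaviour
# (merge `src` into `dst` in place, user values winning, nested dicts
# merged key by key); the real helper may differ in detail:
def _recursive_merge_sketch(src, dst):
    """Merge mapping `src` into mapping `dst` in place and return `dst`."""
    for key, value in src.items():
        if isinstance(value, dict) and isinstance(dst.get(key), dict):
            _recursive_merge_sketch(value, dst[key])
        else:
            dst[key] = value
    return dst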
def test_gains_export(self):
    """Check l2 export to telstate."""
    nchan = 128
    nif = 4
    dump_period = 1.0
    centre_freq = 1200.e6
    bandwidth = 100.e6
    solPint = dump_period / 2.
    solAint = dump_period
    AP_telstate = 'product_GAMP_PHASE'
    P_telstate = 'product_GPHASE'

    spws = [{'centre_freq': centre_freq,
             'num_chans': nchan,
             'channel_width': bandwidth / nchan,
             'sideband': 1,
             'band': 'L'}]
    ka_select = {'pol': 'HH,VV', 'scans': 'track',
                 'corrprods': 'cross', 'nif': nif}
    uvblavg_params = {'maxFact': 1.0, 'avgFreq': 0,
                      'FOV': 100.0, 'maxInt': 1.e-6}
    mfimage_params = {'Niter': 50, 'FOV': 0.1,
                      'xCells': 5., 'yCells': 5.,
                      'doGPU': False, 'Robust': -1.5,
                      'minFluxPSC': 0.1, 'solPInt': solPint / 60.,
                      'solPMode': 'P', 'minFluxASC': 0.1,
                      'solAInt': solAint / 60., 'maxFBW': 0.02}

    # Simulate a '10Jy' source at the phase centre
    cat = katpoint.Catalogue()
    cat.add(katpoint.Target(
        "Alberich lord of the Nibelungs, radec, 20.0, -30.0, (856. 1712. 1. 0. 0.)"))

    telstate = TelescopeState()

    # Set up a scratch space in /tmp
    fd = kc.get_config()['fitsdirs']
    fd += [(None, '/tmp/FITS')]
    kc.set_config(cb_id='CBID', fitsdirs=fd)

    setup_aips_disks()

    scan = [('track', 4, cat.targets[0])]

    # Construct a simulated dataset with our
    # point source at the centre of the field
    ds = MockDataSet(timestamps={'start_time': 0.0, 'dump_period': dump_period},
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=spws,
                     dumps=scan,
                     vis=partial(vis, sources=cat),
                     weights=weights,
                     flags=flags)

    # Try one round of phase-only self-cal & Amp+Phase self-cal
    mfimage_params['maxPSCLoop'] = 1
    mfimage_params['maxASCLoop'] = 1

    # Run the pipeline
    pipeline = pipeline_factory('online', ds, telstate,
                                katdal_select=ka_select,
                                uvblavg_params=uvblavg_params,
                                mfimage_params=mfimage_params)
    pipeline.execute()

    ts = telstate.view('selfcal')
    # Check that what we have in telstate agrees with what we put in
    self.assertEqual(len(ts['antlist']), len(ANTENNA_DESCRIPTIONS))
    self.assertEqual(ts['bandwidth'], bandwidth)
    self.assertEqual(ts['n_chans'], nif)
    pol_ordering = [pol[0] for pol in sorted(CORR_ID_MAP, key=CORR_ID_MAP.get)
                    if pol[0] == pol[1]]
    self.assertEqual(ts['pol_ordering'], pol_ordering)
    if_width = bandwidth / nif
    center_if = nif // 2
    start_freq = centre_freq - (bandwidth / 2.)
    self.assertEqual(ts['center_freq'], start_freq + if_width * (center_if + 0.5))

    self.assertIn(ts.join('selfcal', P_telstate), ts.keys())
    self.assertIn(ts.join('selfcal', AP_telstate), ts.keys())

    def check_gains_timestamps(gains, expect_timestamps):
        timestamps = []
        for gain, timestamp in gains:
            np.testing.assert_array_almost_equal(np.abs(gain), 1.0, decimal=3)
            np.testing.assert_array_almost_equal(np.angle(gain), 0.0)
            timestamps.append(timestamp)
        np.testing.assert_array_almost_equal(timestamps, expect_timestamps,
                                             decimal=1)

    # Check phase-only gains and timestamps
    P_times = np.arange(solPint, ds.end_time.secs, 2. * solPint)
    check_gains_timestamps(ts.get_range(P_telstate, st=0), P_times)
    # Check Amp+Phase gains
    AP_times = np.arange(solAint, ds.end_time.secs, 2. * solAint)
    check_gains_timestamps(ts.get_range(AP_telstate, st=0), AP_times)

    # Check with no Amp+Phase self-cal
    mfimage_params['maxASCLoop'] = 0
    telstate.clear()
    pipeline = pipeline_factory('online', ds, telstate,
                                katdal_select=ka_select,
                                uvblavg_params=uvblavg_params,
                                mfimage_params=mfimage_params)
    pipeline.execute()
    self.assertIn(telstate.join('selfcal', P_telstate), ts.keys())
    self.assertNotIn(telstate.join('selfcal', AP_telstate), ts.keys())

    # Check with no self-cal
    mfimage_params['maxPSCLoop'] = 0
    telstate.clear()
    pipeline = pipeline_factory('online', ds, telstate,
                                katdal_select=ka_select,
                                uvblavg_params=uvblavg_params,
                                mfimage_params=mfimage_params)
    pipeline.execute()
    self.assertNotIn(telstate.join('selfcal', P_telstate), ts.keys())
    self.assertNotIn(telstate.join('selfcal', AP_telstate), ts.keys())

    # Clean up the workspace
    shutil.rmtree(fd[-1][1])
def test_cc_export(self):
    """Check CC models returned by MFImage."""
    nchan = 128

    spws = [{'centre_freq': .856e9 + .856e9 / 2.,
             'num_chans': nchan,
             'channel_width': .856e9 / nchan,
             'sideband': 1,
             'band': 'L'}]

    katdal_select = {'pol': 'HH,VV', 'scans': 'track',
                     'corrprods': 'cross'}
    uvblavg_params = {'FOV': 0.2, 'avgFreq': 0,
                      'chAvg': 1, 'maxInt': 2.0}

    cat = katpoint.Catalogue()
    cat.add(katpoint.Target("Amfortas, radec, 0.0, -90.0, (856. 1712. 1. 0. 0.)"))
    cat.add(katpoint.Target("Klingsor, radec, 0.0, 0.0, (856. 1712. 2. -0.7 0.1)"))
    cat.add(katpoint.Target("Kundry, radec, 100.0, -35.0, (856. 1712. -1.0 1. -0.1)"))

    ts = TelescopeState()

    # Set up a scratch space in /tmp
    fd = kc.get_config()['fitsdirs']
    fd += [(None, '/tmp/FITS')]
    kc.set_config(cb_id='CBID', fitsdirs=fd)

    setup_aips_disks()

    # Point sources with various flux models
    for targ in cat:
        scans = [('track', 5, targ)]
        ds = MockDataSet(timestamps={'start_time': 1.0, 'dump_period': 4.0},
                         subarrays=DEFAULT_SUBARRAYS,
                         spws=spws,
                         dumps=scans,
                         vis=partial(vis, sources=[targ]),
                         weights=weights,
                         flags=flags)

        # 100 clean components
        mfimage_params = {'Niter': 100, 'maxFBW': 0.05,
                          'FOV': 0.1, 'xCells': 5.,
                          'yCells': 5., 'doGPU': False}

        pipeline = pipeline_factory('online', ds, ts,
                                    katdal_select=katdal_select,
                                    uvblavg_params=uvblavg_params,
                                    mfimage_params=mfimage_params)
        pipeline.execute()

        # Get the fitted CCs from telstate
        fit_cc = ts.get('target0_clean_components')
        ts.delete('target0_clean_components')

        all_ccs = katpoint.Catalogue(fit_cc['components'])
        # Should have one merged and fitted component
        self.assertEqual(len(all_ccs), 1)

        cc = all_ccs.targets[0]
        out_fluxmodel = cc.flux_model
        in_fluxmodel = targ.flux_model

        # Check the flux densities of the flux model in the fitted CCs
        test_freqs = np.linspace(out_fluxmodel.min_freq_MHz,
                                 out_fluxmodel.max_freq_MHz, 5)
        in_flux = in_fluxmodel.flux_density(test_freqs)
        out_flux = out_fluxmodel.flux_density(test_freqs)
        np.testing.assert_allclose(out_flux, in_flux, rtol=1.e-3)

    # A field with some off-axis sources to check positions
    offax_cat = katpoint.Catalogue()
    offax_cat.add(katpoint.Target(
        "Titurel, radec, 100.1, -35.05, (856. 1712. 1.1 0. 0.)"))
    offax_cat.add(katpoint.Target(
        "Gurmenanz, radec, 99.9, -34.95, (856. 1712. 1. 0. 0.)"))

    scans = [('track', 5, cat.targets[2])]
    ds = MockDataSet(timestamps={'start_time': 1.0, 'dump_period': 4.0},
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=spws,
                     dumps=scans,
                     vis=partial(vis, sources=offax_cat),
                     weights=weights,
                     flags=flags)

    # Small number of CCs and high gain (not checking the flux model)
    mfimage_params['Niter'] = 4
    mfimage_params['FOV'] = 0.2
    mfimage_params['Gain'] = 0.5
    mfimage_params['Robust'] = -5

    pipeline = pipeline_factory('online', ds, ts,
                                katdal_select=katdal_select,
                                uvblavg_params=uvblavg_params,
                                mfimage_params=mfimage_params)
    pipeline.execute()

    fit_cc = ts.get('target0_clean_components')
    ts.delete('target0_clean_components')
    all_ccs = katpoint.Catalogue(fit_cc['components'])
    # We should have two merged clean components for the two source positions
    self.assertEqual(len(all_ccs), 2)

    # Check the positions of the clean components.
    # These will be ordered by decreasing flux density of the inputs.
    # Positions should be accurate to within a 5" pixel.
    delta_dec = np.deg2rad(5. / 3600.)
    for model, cc in zip(offax_cat.targets, all_ccs.targets):
        delta_ra = delta_dec / np.cos(model.radec()[1])
        self.assertAlmostEqual(cc.radec()[0], model.radec()[0], delta=delta_ra)
        self.assertAlmostEqual(cc.radec()[1], model.radec()[1], delta=delta_dec)

    # Empty the scratch space
    shutil.rmtree(fd[-1][1])
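# The position check above widens the RA tolerance by 1/cos(dec): a fixed
# angular separation on the sky corresponds to a larger difference in the RA
# coordinate away from the equator. A quick standalone illustration at the
# declination of the test field:
import numpy as np

delta_dec = np.deg2rad(5. / 3600.)      # 5 arcsec pixel, in radians
dec = np.deg2rad(-35.)                  # declination of the off-axis field
delta_ra = delta_dec / np.cos(dec)      # equivalent RA coordinate tolerance
print(np.rad2deg(delta_ra) * 3600.)     # ~6.1 arcsec in RA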
def test_offline_pipeline(self):
    """Tests that a run of the offline continuum pipeline executes."""
    # Create a mock dataset and wrap it in a KatdalAdapter
    ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=self.spws,
                     dumps=self.scans)

    # Dummy CB_ID and Product ID and temp fits and aips disks
    fd = kc.get_config()['fitsdirs']
    fd += [(None, os.path.join(os.sep, 'tmp', 'FITS'))]
    kc.set_config(output_id='OID', cb_id='CBID', fitsdirs=fd)

    setup_aips_disks()

    # Create and run the pipeline
    pipeline = pipeline_factory('offline', ds,
                                katdal_select=self.select,
                                uvblavg_params=self.uvblavg_params,
                                mfimage_params=self.mfimage_params,
                                clobber=CLOBBER.difference({'merge'}))
    pipeline.execute()

    # Check that the output FITS file exists and has the right name
    cfg = kc.get_config()
    cb_id = cfg['cb_id']
    out_id = cfg['output_id']
    fits_area = cfg['fitsdirs'][-1][1]

    out_strings = [cb_id, out_id, self.target_name, IMG_CLASS]
    filename = '_'.join(filter(None, out_strings)) + '.fits'
    filepath = os.path.join(fits_area, filename)
    assert os.path.isfile(filepath)
    _check_fits_headers(filepath)

    # Remove the tmp/FITS dir
    shutil.rmtree(fits_area)

    ds = MockDataSet(timestamps=DEFAULT_TIMESTAMPS,
                     subarrays=DEFAULT_SUBARRAYS,
                     spws=self.spws,
                     dumps=self.scans)

    setup_aips_disks()

    # Create and run the pipeline, reusing the previous data
    pipeline = pipeline_factory('offline', ds,
                                katdal_select=self.select,
                                uvblavg_params=self.uvblavg_params,
                                mfimage_params=self.mfimage_params,
                                reuse=True,
                                clobber=CLOBBER)
    metadata = pipeline.execute()
    assert_in(filename, metadata['FITSImageFilename'])
    assert os.path.isfile(filepath)
    _check_fits_headers(filepath)

    # Remove the FITS temporary area
    shutil.rmtree(fits_area)
def main():
    parser = create_parser()
    args = parser.parse_args()
    configure_logging(args)

    log.info("Reading data with applycal=%s", args.applycal)
    katdata = katdal.open(args.katdata, applycal=args.applycal,
                          **args.open_args)

    # Apply the supplied mask to the flags
    if args.mask:
        apply_user_mask(katdata, args.mask)

    # Set up katdal selection based on arguments
    kat_select = {'pol': args.pols, 'nif': args.nif}
    if args.targets:
        kat_select['targets'] = args.targets
    if args.channels:
        start_chan, end_chan = args.channels
        kat_select['channels'] = slice(start_chan, end_chan)

    # Command line katdal selection overrides command line options
    kat_select = recursive_merge(args.select, kat_select)

    # Get the band and determine the default .yaml files
    band = katdata.spectral_windows[katdata.spw].band
    uvblavg_parm_file = args.uvblavg_config
    if not uvblavg_parm_file:
        uvblavg_parm_file = os.path.join(os.sep, "obitconf",
                                         f"uvblavg_{band}.yaml")
    log.info('UVBlAvg parameter file for %s-band: %s', band, uvblavg_parm_file)
    mfimage_parm_file = args.mfimage_config
    if not mfimage_parm_file:
        mfimage_parm_file = os.path.join(os.sep, "obitconf",
                                         f"mfimage_{band}.yaml")
    log.info('MFImage parameter file for %s-band: %s', band, mfimage_parm_file)

    # Get defaults for uvblavg and mfimage and merge user supplied ones
    uvblavg_args = get_and_merge_args(uvblavg_parm_file, args.uvblavg)
    mfimage_args = get_and_merge_args(mfimage_parm_file, args.mfimage)

    # Grab the cal refant from the katdal dataset and default to
    # it if it is available and hasn't been set by the user.
    ts = katdata.source.telstate
    refant = ts.get('cal_refant')
    if refant is not None and 'refAnt' not in mfimage_args:
        mfimage_args['refAnt'] = aips_ant_nr(refant)

    # Try to average down to 1024 channels if the user
    # hasn't specified something else
    num_chans = len(katdata.channels)
    factor = num_chans // 1024
    if 'avgFreq' not in uvblavg_args:
        if factor > 1:
            uvblavg_args['avgFreq'] = 1
            uvblavg_args['chAvg'] = factor

    # Get the default config.
    dc = kc.get_config()

    # capture_block_id is used to generate AIPS disk filenames
    capture_block_id = katdata.obs_params['capture_block_id']

    if args.reuse:
        # Set up the AIPS disk from the specified directory
        if os.path.exists(args.reuse):
            aipsdirs = [(None, args.reuse)]
            log.info('Re-using AIPS data area: %s', aipsdirs[0][1])
            reuse = True
        else:
            msg = "AIPS disk at '%s' does not exist." % (args.reuse)
            log.exception(msg)
            raise IOError(msg)
    else:
        # Set up aipsdisk configuration from args.workdir
        aipsdirs = [(None, os.path.join(args.workdir,
                                        capture_block_id + '_aipsdisk'))]
        log.info('Using AIPS data area: %s', aipsdirs[0][1])
        reuse = False

    # Set up output configuration from args.outputdir
    fitsdirs = dc['fitsdirs']
    # Append outputdir to fitsdirs
    fitsdirs += [(None, args.outputdir)]
    log.info('Using output data area: %s', args.outputdir)

    kc.set_config(aipsdirs=aipsdirs, fitsdirs=fitsdirs,
                  output_id='', cb_id=capture_block_id)

    setup_aips_disks()

    pipeline = pipeline_factory('offline', katdata,
                                katdal_select=kat_select,
                                uvblavg_params=uvblavg_args,
                                mfimage_params=mfimage_args,
                                nvispio=args.nvispio,
                                clobber=args.clobber,
                                prtlv=args.prtlv,
                                reuse=reuse)

    # Execute it
    pipeline.execute()
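# aips_ant_nr above converts a katdal receptor name (e.g. 'm012') into the
# 1-relative antenna number that AIPS/Obit uses. A minimal sketch of the
# assumed mapping (MeerKAT 'm000' -> AIPS antenna 1); the real helper may
# differ:
def _aips_ant_nr_sketch(ant_name):
    """Return the assumed 1-based AIPS antenna number for a MeerKAT name."""
    return int(ant_name[1:]) + 1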