def _source_info(self):
    """
    Infer MFImage source names and file outputs.
    For each source, MFImage outputs UV data and a CLEAN file.

    Returns
    -------
    tuple
        ``(uv_sources, target_indices, uv_files, clean_files)`` where
        ``uv_sources`` is the list of AIPS source names,
        ``target_indices`` the corresponding katdal target indices and
        ``uv_files`` / ``clean_files`` the per-source AIPSPath outputs.
    """
    target_indices = self.ka.target_indices[:]

    # Use output_id for labels
    label = kc.get_config()['output_id']

    # Source names
    uv_sources = [s["SOURCE"][0].strip() for s in self.ka.uv_source_rows]

    uv_files = [AIPSPath(name=s, disk=self.disk, label=label,
                         aclass=UV_CLASS, atype="UV")
                for s in uv_sources]

    clean_files = [AIPSPath(name=s, disk=self.disk, label=label,
                            aclass=IMG_CLASS, atype="MA")
                   for s in uv_sources]

    # Find a single maximum sequence number referencing unassigned
    # catalogue numbers for all uv and clean files, so that the UV
    # and CLEAN outputs of a source share the same sequence number.
    max_seq_nr = max(next_seq_nr(f) for f in uv_files + clean_files)

    uv_files = [f.copy(seq=max_seq_nr) for f in uv_files]
    clean_files = [f.copy(seq=max_seq_nr) for f in clean_files]

    return uv_sources, target_indices, uv_files, clean_files
def test_next_seq_nr(self):
    """ Test finding the next highest disk sequence number of an AIPS Path """
    # Two AIPS paths with consecutive sequence numbers 10 and 11
    path_a = AIPSPath(name='test', disk=1, aclass="klass", seq=10)
    path_b = path_a.copy(seq=path_a.seq + 1)

    with obit_context(), file_cleaner([path_a, path_b]):
        # Materialise the first file; the next free sequence
        # number should now be one past it
        with uv_factory(aips_path=path_a, mode="w"):
            pass

        self.assertEqual(next_seq_nr(path_a), path_a.seq + 1)

        # Materialise the second file; the next free sequence
        # number advances again
        with uv_factory(aips_path=path_b, mode="w"):
            pass

        self.assertEqual(next_seq_nr(path_a), path_a.seq + 2)
        self.assertEqual(next_seq_nr(path_b), path_b.seq + 1)
def execute_implementation(self):
    """
    Run the imaging pipeline implementation.

    Selects sources and infers output files, obtains a merged UV file
    (either by reusing an existing 'merge' file or by exporting and
    merging scans), runs MFImage on it and exports the CLEAN images.

    Returns
    -------
    dict
        Metadata for the exported images, or an empty dict if the
        merged file contains no visibilities.

    Raises
    ------
    ValueError
        If ``self.reuse`` is set but the AIPS disk holds no 'merge' file.
    """
    result_tuple = self._select_and_infer_files()
    uv_sources, target_indices, uv_files, clean_files = result_tuple
    if "mfimage" in self.clobber:
        self.cleanup_uv_files += uv_files
    if "clean" in self.clobber:
        self.cleanup_img_files += clean_files

    # Update MFImage source selection
    self.mfimage_params['Sources'] = uv_sources

    # Find the highest numbered merge file if we are reusing
    if self.reuse:
        uv_mp = self.ka.aips_path(aclass='merge',
                                  name=kc.get_config()['cb_id'])
        # Find the merge file with the highest seq #
        hiseq = next_seq_nr(uv_mp) - 1
        # hiseq will be zero if the aipsdisk has no 'merge' file
        if hiseq == 0:
            raise ValueError("AIPS disk at '%s' has no 'merge' file to reuse."
                             % (kc.get_config()['aipsdirs'][self.disk - 1][-1]))
        else:
            # Get the AIPS entry of the UV data to reuse
            self.uv_merge_path = uv_mp.copy(seq=hiseq)
            log.info("Re-using UV data in '%s' from AIPS disk: '%s'",
                     self.uv_merge_path,
                     kc.get_config()['aipsdirs'][self.disk - 1][-1])
            merge_uvf = uv_factory(aips_path=self.uv_merge_path,
                                   mode='r',
                                   nvispio=self.nvispio)

            merge_nvis = merge_uvf.nvis_from_NX()
            # Only the visibility count is needed here; close the
            # file so the handle isn't leaked (MFImage is run from
            # the path, not this handle).
            merge_uvf.close()
    else:
        merge_nvis = self._export_and_merge_scans()
        if "merge" in self.clobber:
            self.cleanup_uv_files.append(self.uv_merge_path)

    log.info('There are %s visibilities in the merged file', merge_nvis)
    if merge_nvis < 1:
        return {}
    else:
        self._run_mfimage(self.uv_merge_path, uv_sources)

        self._get_wavg_img(clean_files)
        for uv, clean in zip(uv_files, clean_files):
            self._attach_SN_tables_to_image(uv, clean)

        metadata = export_images(clean_files, target_indices,
                                 self.odisk, self.ka)
        return metadata
def _get_wavg_img(self, image_files):
    """
    For each MF image in the list of files, perform a weighted average
    over the coarse frequency planes and store it in the first plane
    of the image, preserving any previous higher order fitting in the
    subsequent planes.

    Parameters
    ----------
    image_files : list
        The images to process (output from MFImage task)
    """
    for img in image_files:
        with img_factory(aips_path=img, mode="rw") as imf:
            if imf.exists:
                # Write the fit result to a fresh, unused sequence number
                tmp_img = img.copy(seq=next_seq_nr(img))
                # nterm=1 does weighted average of planes
                imf.FitMF(tmp_img, nterm=1)
                tmp_imf = img_factory(aips_path=tmp_img, mode="r")
                try:
                    # Get the first (weighted average) plane of tmp_imf.
                    img_plane = tmp_imf.GetPlane()
                    # Stick it into the first plane of imf.
                    imf.PutPlane(img_plane)
                finally:
                    # Always remove the temporary image, even if the
                    # plane copy fails, so it can't leak on the disk.
                    tmp_imf.Zap()
def _export_and_merge_scans(self):
    """
    1. Read scans from katdal
    2. Export scan data to an AIPS UV file
    3. Baseline average the file.
    4. Merge averaged AIPS UV file into a merge UV file.

    Returns
    -------
    int
        Total number of baseline-averaged visibilities
        merged into ``self.uv_merge_path``.
    """

    # The merged UV observation file. We wait until
    # we have a baseline averaged file with which to condition it
    merge_uvf = None
    uv_mp = self.ka.aips_path(aclass='merge', name=kc.get_config()['cb_id'])
    self.uv_merge_path = uv_mp.copy(seq=next_seq_nr(uv_mp))

    global_desc = self.ka.uv_descriptor()
    global_table_cmds = self.ka.default_table_cmds()

    # FORTRAN indexing
    merge_firstVis = 1

    # Scan indices
    scan_indices = [int(si) for si in self.ka.scan_indices]

    merge_blavg_nvis = 0
    # Export each scan individually, baseline averaging and merging it
    # into the final observation file.
    # NOTE: Loop over scan indices here rather than using the ka.scans
    # generator to avoid a conflict with the loop over ka.scans in uv_export.
    for si in scan_indices:
        # Select the current scan
        self.ka.select(scans=si)
        # Get path, with sequence based on scan index
        scan_path = self.uv_merge_path.copy(aclass='raw', seq=int(si))
        # Get the AIPS source for logging purposes
        aips_source = self.ka.catalogue[self.ka.target_indices[0]]
        aips_source_name = aips_source["SOURCE"][0].strip()

        log.info("Creating '%s'", scan_path)

        # Create a UV file for the scan and export to it
        with uv_factory(aips_path=scan_path, mode="w",
                        nvispio=self.nvispio,
                        table_cmds=global_table_cmds,
                        desc=global_desc) as uvf:
            uv_export(self.ka, uvf, time_step=self.time_step)

        # Retrieve the single scan index.
        # The time centroids and interval should be correct
        # but the visibility indices need to be repurposed
        scan_uvf = uv_factory(aips_path=scan_path, mode='r',
                              nvispio=self.nvispio)

        assert len(scan_uvf.tables["AIPS NX"].rows) == 1
        nx_row = scan_uvf.tables["AIPS NX"].rows[0].copy()
        scan_nvis = scan_uvf.nvis_from_NX()

        # If we should be merging scans
        # just use the existing scan path and file
        if self.merge_scans:
            blavg_path = scan_path
            blavg_uvf = scan_uvf
        # Otherwise performing baseline averaging, deriving
        # a new scan path and file
        else:
            # Perform baseline averaging
            blavg_path = self._blavg_scan(scan_path)
            blavg_uvf = uv_factory(aips_path=blavg_path, mode='r',
                                   nvispio=self.nvispio)

        # Create the merge UV file, if necessary
        merge_uvf = self._maybe_create_merge_uvf(merge_uvf, blavg_uvf,
                                                 global_table_cmds)

        blavg_nvis = blavg_uvf.nvis_from_NX()
        merge_blavg_nvis += blavg_nvis

        # Record something about the baseline averaging process
        param_str = ', '.join("%s=%s" % (k, v)
                              for k, v in self.uvblavg_params.items())

        blavg_history = ("Scan %d '%s' averaged "
                         "%s to %s visiblities. UVBlAvg(%s)"
                         % (si, aips_source_name, scan_nvis,
                            blavg_nvis, param_str))
        log.info(blavg_history)
        merge_uvf.append_history(blavg_history)

        if blavg_nvis > 0:
            log.info("Merging '%s' into '%s'", blavg_path,
                     self.uv_merge_path)
            merge_firstVis = self._copy_scan_to_merge(merge_firstVis,
                                                      merge_uvf, blavg_uvf,
                                                      nx_row)
        else:
            # log.warn is a deprecated alias of log.warning
            log.warning("No visibilities to merge for scan %d", si)

        # Remove scan once merged
        if 'scans' in self.clobber:
            log.info("Zapping '%s'", scan_uvf.aips_path)
            scan_uvf.Zap()
        else:
            scan_uvf.Close()

        # If merging scans for testing purposes, our
        # baseline averaged file will be the same as the
        # scan file, which was handled above, so don't
        # delete again. Otherwise default to
        # normal clobber handling.
        if not self.merge_scans:
            if 'avgscans' in self.clobber:
                log.info("Zapping '%s'", blavg_uvf.aips_path)
                blavg_uvf.Zap()
            else:
                blavg_uvf.Close()

    # No scans were selected at all, so no merge file was ever
    # created -- bail out before dereferencing merge_uvf
    if merge_uvf is None:
        log.error("No scans were merged into '%s'", self.uv_merge_path)
        return 0

    if merge_blavg_nvis == 0:
        log.error("Final merged file '%s' has ZERO averaged visibilities",
                  self.uv_merge_path)
    # Write the index table
    merge_uvf.tables["AIPS NX"].write()

    # Create an empty calibration table
    merge_uvf.attach_CL_from_NX_table(self.ka.max_antenna_number)

    # Close merge file
    merge_uvf.close()

    return merge_blavg_nvis
args = create_parser().parse_args() KA = KatdalAdapter(katdal.open(args.katdata)) with obit_context(): # Construct file object aips_path = KA.aips_path(name=args.name, disk=args.disk, aclass=args.aclass, seq=args.seq, dtype="AIPS") # Handle invalid sequence numbers if args.seq is None or args.seq < 1: aips_path.seq = next_seq_nr(aips_path) # Apply the katdal selection KA.select(**args.select) # Fall over on empty selections if not KA.size > 0: raise ValueError("The katdal selection produced an empty dataset" "\n'%s'\n" % pretty(args.select)) # UV file location variables with uv_factory(aips_path=aips_path, mode="w", nvispio=args.nvispio, table_cmds=KA.default_table_cmds(), desc=KA.uv_descriptor()) as uvf: