def domanage(infilename, mode, versionname, oldname, comment, merge):
    availablemodes = ['list', 'save', 'restore', 'delete', 'rename']
    if mode in availablemodes:
        namer = filenamer('sdflagmanagerold', infilename)
        msfilename = namer.configure_name(kind='temp', suffix='ms')
        # if directories 'msfilename' and 'msfilename.flagversions' already exist accidentally,
        # rename them for backup (backupms, backupmsfver)
        sdfverfile = namer.flagversion_name(infilename)
        msfverfile = namer.flagversion_name(msfilename)
        if os.path.exists(sdfverfile):
            move(sdfverfile, msfverfile)
        sdutil.save(sd.scantable(infilename, False), msfilename, 'MS2', False)
        task_flagmanager.flagmanager(msfilename, mode, versionname,
                                     oldname, comment, merge)
        if mode == 'restore':
            backupinfile = namer.configure_name(kind='backup', suffix='asap')
            # if a directory with the same name as backupinfile exists, rename it for backup
            move(infilename, backupinfile)
            sdutil.save(sd.scantable(msfilename, False), infilename, 'ASAP', False)
            remove(msfilename)
            move(msfverfile, sdfverfile)
            # if backupms and backupmsfver exist, rename them back
            # to msfilename and msfilename.flagversions, respectively
    else:
        raise Exception, "Unknown mode: " + str(mode)

def domanage(infilename, mode, versionname, oldname, comment, merge):
    availablemodes = ['list', 'save', 'restore', 'delete', 'rename']
    if mode in availablemodes:
        namer = filenamer('sdflagmanager', infilename)
        msfilename = namer.configure_name(kind='temp', suffix='ms')
        # if directories 'msfilename' and 'msfilename.flagversions' already exist accidentally,
        # rename them for backup (backupms, backupmsfver)
        sdfverfile = namer.flagversion_name(infilename)
        msfverfile = namer.flagversion_name(msfilename)
        if os.path.exists(sdfverfile):
            move(sdfverfile, msfverfile)
        sdutil.save(sd.scantable(infilename, False), msfilename, 'MS2', False)
        task_flagmanager.flagmanager(msfilename, mode, versionname,
                                     oldname, comment, merge)
        if mode == 'restore':
            backupinfile = namer.configure_name(kind='backup', suffix='asap')
            # if a directory with the same name as backupinfile exists, rename it for backup
            move(infilename, backupinfile)
            sdutil.save(sd.scantable(msfilename, False), infilename, 'ASAP', False)
            remove(msfilename)
            move(msfverfile, sdfverfile)
            # if backupms and backupmsfver exist, rename them back
            # to msfilename and msfilename.flagversions, respectively
    else:
        raise Exception, "Unknown mode: " + str(mode)

def initialize_scan(self):
    if self.splitant:
        if not is_ms(self.infile):
            msg = 'input data must be in MS format'
            raise Exception, msg
        import datetime
        dt = datetime.datetime.now()
        self.temp_prefix = "temp-sdsave" + dt.strftime("%Y%m%d%H%M%S")
        self.split_infiles = sd.splitant(filename=self.infile,
                                         outprefix=self.temp_prefix,
                                         overwrite=self.overwrite,
                                         getpt=self.getpt)
        self.scans = []
        self.antenna_names = []
        for split_infile in self.split_infiles:
            work_scan = sd.scantable(split_infile, average=False)
            # scantable selection
            #work_scan.set_selection(self.get_selector_by_list())
            work_scan.set_selection(self.get_selector(work_scan))
            self.scans.append(work_scan)
            # retrieve antenna names
            self.antenna_names.append(split_infile.split('.')[1])
    else:
        scan = sd.scantable(self.infile, average=False,
                            antenna=self.antenna, getpt=self.getpt)
        # scantable selection
        scan.set_selection(self.get_selector(scan))
        #self.scan.set_selection(self.get_selector_by_list())
        self.scan = scan

def initialize_scan(self):
    if self.insitu:
        # update infile
        storage = sd.rcParams['scantable.storage']
        sd.rcParams['scantable.storage'] = 'disk'
        self.scan = sd.scantable(self.infile, average=False)
        sd.rcParams['scantable.storage'] = storage
    else:
        self.scan = sd.scantable(self.infile, average=False)
    sel = self.get_selector(self.scan)
    self.scan.set_selection(sel)
    self.assert_no_channel_selection_in_spw('warn')

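# The in-situ branch above temporarily forces 'disk' storage by saving and
# restoring sd.rcParams['scantable.storage']. The sketch below wraps that
# save/restore pattern in a context manager; it is illustrative only and
# assumes nothing beyond the sd.rcParams dictionary used above (the helper
# name and the 'input.asap' filename are made up).
import contextlib

import asap as sd


@contextlib.contextmanager
def scantable_storage(mode):
    """Temporarily switch sd.rcParams['scantable.storage'] to `mode`."""
    saved = sd.rcParams['scantable.storage']
    sd.rcParams['scantable.storage'] = mode
    try:
        yield
    finally:
        # restore the original storage mode even if loading fails
        sd.rcParams['scantable.storage'] = saved


# usage sketch: force the table onto disk while it is (re)loaded
with scantable_storage('disk'):
    scan = sd.scantable('input.asap', average=False)
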
def test_init(self):
    fname = os.path.join(os.path.dirname(__file__), "data", "MOPS.rpf")
    st = scantable(fname, average=False)
    assert_equal(st.ncycle(), 32)
    st = scantable(fname, average=True)
    assert_equal(st.ncycle(), 2)
    st = scantable(fname, unit="Jy")
    assert_equal(st.get_fluxunit(), "Jy")
    st = scantable(fname, unit="K")
    assert_equal(st.get_fluxunit(), "K")
    assert_raises(RuntimeError, scantable, fname, unit="junk")
    st = scantable([fname, fname], average=False)
    assert_equal(st.nscan(), 4)

def initialize_scan(self):
    # load the data without averaging
    sorg = sd.scantable(self.infile, average=False, antenna=self.antenna)
    # restorer
    self.restorer = sdutil.scantable_restore_factory(sorg,
                                                     self.infile,
                                                     self.fluxunit,
                                                     sorg.get_unit(),
                                                     self.frame,
                                                     self.doppler,
                                                     self.restfreq)
    # Select scan and field
    sorg.set_selection(self.get_selector(sorg))
    # this is a bit tricky:
    # set fluxunit here instead of self.set_to_scan
    # and remove fluxunit attribute to disable additional
    # call of set_fluxunit in self.set_to_scan
    self.scan = sdutil.set_fluxunit(sorg, self.fluxunit, self.telescopeparam, False)
    self.fluxunit_saved = self.fluxunit
    del self.fluxunit
    if self.scan:
        # Restore flux unit in original table before deleting
        self.restorer.restore()
        del self.restorer
        self.restorer = None
    else:
        self.scan = sorg

def initialize_scan(self):
    # load the data without averaging
    sorg = sd.scantable(self.infile, average=self.scanaverage, antenna=self.antenna)
    # collect data to restore
    self.restorer = sdutil.scantable_restore_factory(sorg,
                                                     self.infile,
                                                     self.fluxunit,
                                                     '',  # specunit
                                                     self.frame,
                                                     self.doppler,
                                                     self.restfreq)
    # scantable selection
    self.set_selection(sorg)
    # this is a bit tricky:
    # set fluxunit here instead of self.set_to_scan
    # and remove fluxunit attribute to disable additional
    # call of set_fluxunit in self.set_to_scan
    self.scan = sdutil.set_fluxunit(sorg, self.fluxunit, self.telescopeparam, False)
    self.fluxunit_saved = self.fluxunit
    del self.fluxunit
    if self.scan:
        # Restore flux unit in original table before deleting
        self.restorer.restore()
        del self.restorer
        self.restorer = None
    else:
        self.scan = sorg

def _verify_saved_flag(self, infile, flagdatafile):
    # export infile to MS to obtain expected FLAG and FLAG_ROW
    with temporary_file() as name:
        s = sd.scantable(infile, average=False)
        s.save(name, format='MS2')
        with tbmanager(name) as tb:
            expected_flag_row = tb.getcol('FLAG_ROW')
            expected_flag = tb.getcol('FLAG')
    # actual FLAG and FLAG_ROW
    with tbmanager(flagdatafile) as tb:
        flag_row = tb.getcol('FLAG_ROW')
        flag = tb.getcol('FLAG')
    # compare
    self.assertEqual(len(flag_row), len(expected_flag_row),
                     msg='length of FLAG_ROW differ')
    self.assertEqual(flag.shape, expected_flag.shape,
                     msg='shape of FLAG differ')
    nrow = len(flag_row)
    for irow in xrange(nrow):
        self.assertEqual(flag_row[irow], expected_flag_row[irow],
                         msg='Row %s: FLAG_ROW differ' % (irow))
        self.assertTrue(all(flag[:, :, irow].flatten() == expected_flag[:, :, irow].flatten()),
                        msg='Row %s: FLAG differ' % (irow))

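# The test above uses a temporary_file context manager that yields a scratch
# path and cleans it up afterwards. A minimal sketch of such a helper is shown
# below; the name and behaviour are inferred from its use here rather than
# taken from the project, so the real implementation may differ.
import contextlib
import os
import shutil
import tempfile


@contextlib.contextmanager
def temporary_file(suffix='.ms'):
    """Yield a scratch path and remove whatever was written there afterwards."""
    name = tempfile.mktemp(suffix=suffix)
    try:
        yield name
    finally:
        # scantable.save(..., format='MS2') writes a directory, so handle both cases
        if os.path.isdir(name):
            shutil.rmtree(name)
        elif os.path.exists(name):
            os.remove(name)
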
def setup(self):
    s = scantable("data/2011-10-13_1609-MX025.rpf", average=False)
    # make sure this order is always correct - it can be random
    sel = selector()
    sel.set_order(["SCANNO", "IFNO", "POLNO"])
    s.set_selection(sel)
    self.st = s.copy()

def initialize_scan(self):
    isScantable = is_scantable(self.infile)
    # load the data without time/pol averaging
    sorg = sd.scantable(self.infile, average=self.scanaverage,
                        antenna=self.antenna)
    rfset = (self.restfreq != '') and (self.restfreq != [])
    doCopy = (self.frame != '') or (self.doppler != '') or rfset \
             or (self.fluxunit != '' and self.fluxunit != sorg.get_fluxunit()) \
             or (self.specunit != '' and self.specunit != sorg.get_unit())
    doCopy = doCopy and isScantable
    # check spw
    self.assert_no_channel_selection_in_spw('warn')
    # A scantable selection
    sel = self.get_selector(sorg)
    if len(self.raster) > 0:
        sel = self.select_by_raster(sel, sorg)
    sorg.set_selection(sel)
    self.ssel = sel.__str__()
    del sel
    # Copy scantable when using disk storage not to modify
    # the original table.
    if doCopy and self.is_disk_storage:
        self.scan = sorg.copy()
    else:
        self.scan = sorg
    del sorg

def setup(self):
    s = scantable('data/B68test.nro', average=False, freqref='VREF')
    sel = selector()
    # make sure this order is always correct - it can be random
    sel.set_order(["SCANNO", "POLNO"])
    s.set_selection(sel)
    self.st = s.copy()
    del s

def setup(self):
    pth = os.path.dirname(__file__)
    s = scantable(os.path.join(pth, "data", "MOPS.rpf"), average=True)
    sel = selector()
    # make sure this order is always correct - it can be random
    sel.set_order(["SCANNO", "POLNO"])
    s.set_selection(sel)
    self.st = s.copy()
    restfreqs = [86.243]  # 13CO-1/0, SiO the two IF
    self.st.set_restfreqs(restfreqs, "GHz")

def initialize_scan(self):
    sorg = sd.scantable(self.infile, average=False, antenna=self.antenna)
    sel = self.get_selector(sorg)
    sorg.set_selection(sel)
    del sel
    # Copy scantable when using disk storage not to modify
    # the original table.
    if is_scantable(self.infile) and self.is_disk_storage:
        self.scan = sorg.copy()
    else:
        self.scan = sorg
    del sorg

def save(self, args):
    fname = args[0]
    self.st.save(fname, args[1], True)
    # do some verification if args[2] == True
    if args[-1]:
        s = scantable(fname)
        ds = self.st - s
        assert_equals(self.st.getpolnos(), s.getpolnos())
        assert_equals(self.st.getscannos(), s.getscannos())
        assert_equals(self.st.getifnos(), s.getifnos())
        assert_equals(self.st.getbeamnos(), s.getbeamnos())
        # see if the residual spectra are ~ 0.0
        for spec in ds:
            assert_almost_equals(sum(spec) / len(spec), 0.0, 5)

def initialize_scan(self):
    # instantiate scantable
    self.scan = sd.scantable(self.infile, average=False, antenna=self.antenna)
    # restorer
    self.restorer = sdutil.scantable_restore_factory(self.scan,
                                                     self.infile,
                                                     self.fluxunit,
                                                     '',  # specunit=''
                                                     self.frame,
                                                     self.doppler,
                                                     self.restfreq)
    # Apply selection
    self.scan.set_selection(self.get_selector())

def execute(self):
    if os.path.exists(self.sdfverfile):
        move(self.sdfverfile, self.msfverfile)
    sdutil.save(self.scan, self.msfile, 'MS2', False)
    task_flagmanager.flagmanager(self.msfile, self.mode, self.versionname,
                                 self.oldname, self.comment, self.merge)
    if self.mode == 'restore':
        # if a directory with the same name as backupinfile exists, rename it for backup
        del self.scan
        move(self.infile_abs, self.backupfile)
        sdutil.save(sd.scantable(self.msfile, False), self.infile_abs, 'ASAP', False)
        move(self.msfverfile, self.sdfverfile)

def initialize_scan(self):
    sorg = sd.scantable(self.infile, average=False, antenna=self.antenna)
    if not isinstance(sorg, Scantable):
        raise Exception, 'infile=%s is not found' % self.infile
    # A scantable selection
    #sel = self.get_selector()
    sel = self.get_selector(sorg)
    sorg.set_selection(sel)
    self.assert_no_channel_selection_in_spw('warn')
    # Copy scantable when using disk storage not to modify
    # the original table.
    if is_scantable(self.infile) and self.is_disk_storage:
        self.scan = sorg.copy()
    else:
        self.scan = sorg
    del sorg

def __test_result(self, infile, result_ret, result_out, rows):
    casalog.post("result=%s" % (result_ret))
    s = sd.scantable(infile, average=False)
    self.assertTrue(self.fit_ref.has_key(infile))
    fit_ref = self.fit_ref[infile]
    for irow in xrange(len(rows)):
        row = rows[irow]
        scanno = s.getscan(row)
        ifno = s.getif(row)
        polno = s.getpol(row)
        key = (scanno, ifno, polno)
        ref = fit_ref[key]
        # check nfit
        nfit = result_ret["nfit"][0]
        self.assertEqual(nfit, len(ref))
        for icomp in xrange(len(ref)):
            comp = ref[icomp]
            # check peak
            peak = result_ret["peak"][irow][icomp][0]
            diff = abs((peak - comp[0]) / comp[0])
            self.assertLess(diff, self.tol)
            # check center
            center = result_ret["cent"][irow][icomp][0]
            self.assertEqual(center, comp[1])
            # check fwhm
            fwhm = result_ret["fwhm"][irow][icomp][0]
            self.assertEqual(fwhm, comp[2])
    for (k, v) in result_out.items():
        ref = fit_ref[k]
        self.assertEqual(len(v), 3 * len(ref))
        for icomp in xrange(len(ref)):
            offset = icomp * 3
            _ref = ref[icomp]
            # check peak
            diff = abs((v[offset] - _ref[0]) / _ref[0])
            self.assertLess(diff, self.tol)
            # check center
            self.assertEqual(v[offset + 1], _ref[1])
            # check fwhm
            self.assertEqual(v[offset + 2], _ref[2])

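# The peak checks above use a relative-difference tolerance while center and
# fwhm are compared exactly. A small illustrative helper for the relative
# check is sketched below; the name and signature are not part of the task code.
def assert_relative_close(testcase, actual, expected, tol):
    """Fail `testcase` unless |actual - expected| / |expected| < tol."""
    reldiff = abs((actual - expected) / expected)
    testcase.assertLess(reldiff, tol,
                        msg='relative difference %g exceeds tolerance %g' % (reldiff, tol))
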
def __compile(self):
    # infiles
    if isinstance(self.infiles, str):
        self.infiles = [self.infiles]
    # scantable for temporary use
    tmpst = sd.scantable(self.infiles[0], False)
    # scanlist
    #self.scans = sdutil._to_list(self.scanlist, int)
    self.scans = tmpst.parse_idx_selection("SCAN", self.scanno)
    # pollist
    #self.pols = sdutil._to_list(self.pollist, int)
    self.pols = tmpst.parse_idx_selection("POL", self.polno)
    # spw
    if self.spw.strip() == '-1':
        self.ifno = tmpst.getif(0)
    else:
        masklist = tmpst.parse_spw_selection(self.spw)
        if len(masklist) == 0:
            raise ValueError, "Invalid spectral window selection. Selection contains no data."
        self.ifno = masklist.keys()[0]
    # outfile
    self.outname = sdutil.get_default_outfile_name(self.infiles[0],
                                                   self.outfile,
                                                   self.suffix)
    sdutil.assert_outfile_canoverwrite_or_nonexistent(self.outname,
                                                      'ASAP',
                                                      self.overwrite)
    # nx and ny
    (self.nx, self.ny) = sdutil.get_nx_ny(self.npix)
    # cellx and celly
    (self.cellx, self.celly) = sdutil.get_cellx_celly(self.cell)
    # map center
    self.mapcenter = sdutil.get_map_center(self.center)
    del tmpst

def initialize_scan(self):
    sorg = sd.scantable(self.infile, average=False, antenna=self.antenna)
    if abs(self.plotlevel) > 1:
        casalog.post("Initial Scantable:")
        sorg._summary()
    # Copy the original data (CAS-3987)
    if self.is_disk_storage \
       and (sdutil.get_abspath(self.project) != sdutil.get_abspath(self.infile)):
        self.scan = sorg.copy()
    else:
        self.scan = sorg
    # data selection
    #self.scan.set_selection(self.get_selector())
    selector = self.get_selector()
    if len(self.rasterrow) > 0:
        selector = self.select_by_raster(selector)
    self.scan.set_selection(selector)

def initialize_scan(self):
    sorg = sd.scantable(self.infile, average=False, antenna=self.antenna)
    if len(self.row.strip()) > 0:
        self.rowlist = sorg.parse_idx_selection('row', self.row)
    sel = self.get_selector(sorg)
    # Copy scantable when using disk storage not to modify
    # the original table.
    if is_scantable(self.infile) and self.is_disk_storage:
        if self.keeprows:
            # copy first to keep rows
            self.scan = sorg.copy()
            self.scan.set_selection(sel)
        else:
            sorg.set_selection(sel)
            self.scan = sorg.copy()
    else:
        sorg.set_selection(sel)
        self.scan = sorg
    del sel, sorg

def init(self):
    """Create a dummy scantable to work with linefinder.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Notes
    -----
    Should not be called directly.
    """
    # remove old table if it exists
    if os.path.exists(self.name):
        os.system('\\rm -rf %s' % (self.name))
    # set up a scantable
    s = asap._asap.Scantable(False)
    s._save(self.name)
    del s
    # set up a casa table
    self.tb.open(self.name, nomodify=False)
    self.tb.addrows(1)
    if self.nchan != 0:
        self.tb.putcell('SPECTRA', 0, np.zeros(self.nchan, float))
        self.tb.putcell('FLAGTRA', 0, np.zeros(self.nchan, int))
    self.tb.close()
    # make sure the dummy scantable is loaded into memory
    storageorg = asap.rcParams['scantable.storage']
    asap.rcParams['scantable.storage'] = 'memory'
    self.scantab = asap.scantable(self.name, False)
    os.system('\\rm -rf %s' % (self.name))
    asap.rcParams['scantable.storage'] = storageorg

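# For context, a minimal sketch of how the dummy scantable prepared by init()
# might be handed to the ASAP line finder. It assumes the usual asap.linefinder
# interface (set_scan, set_options, find_lines, get_ranges); the threshold
# value and the helper name are illustrative, not part of the task code.
import asap


def find_lines_in(scantab):
    """Run the ASAP line finder on `scantab` and return detected channel ranges."""
    fl = asap.linefinder()
    fl.set_scan(scantab)           # attach the scantable (e.g. the dummy one from init())
    fl.set_options(threshold=5)    # detection threshold (illustrative value)
    fl.find_lines(nRow=0)          # search row 0 for spectral lines
    return fl.get_ranges()         # channel ranges of the detected lines
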
# FLS3 data calibration
# this is the calibration part of the FLS3 data
#
# Enable ASAP functionality by importing the library
casapath = os.environ['CASAPATH']
import asap as sd
os.environ['CASAPATH'] = casapath
print '--Import--'
# Load MeasurementSet data into an ASAP scantable (this takes a while)
storage_sav = sd.rcParams['scantable.storage']
sd.rc('scantable', storage='disk')   # Note: this enables handling of large datasets with limited memory
#s = sd.scantable('FLS3_all_newcal_SP', False)
s = sd.scantable(datapath, average=False, getpt=False)   # average=False indicates that no averaging should be done -
                                                         # this is always the case for data that hasn't been calibrated
importproc = time.clock()
importtime = time.time()
print '--Split & Save--'
# split out the data for the field of interest
s0 = s.get_scan('FLS3a*')        # get all scans with FLS3a source
s0.save('FLS3a_HI.asap')         # save this data to an ASAP dataset on disk
del s                            # delete scantables that will not be used any further
del s0
splitproc = time.clock()
splittime = time.time()
print '--Calibrate--'
s = sd.scantable('FLS3a_HI.asap', average=False)   # load in the saved ASAP dataset with FLS3a

def initialize_scan(self):
    self.scanlist = [sd.scantable(f, average=False, antenna=self.antenna)
                     for f in self.infiles]

def initialize_scan(self):
    self.scan = sd.scantable(self.infile, average=False, antenna=self.antenna)

def asscantable(self, rowid=None, rasterid=None):
    s = sd.scantable(self.infile, average=False)
    sel = self.asselector(rowid=rowid, rasterid=rasterid)
    s.set_selection(sel)
    return s

# Scans: 230-236,253-256  Setup 1 H2CO et al
# Scans: 237-240,249-252  Setup 2 HC3N et al
# Scans: 241-248          Setup 3 SiO et al
casapath = os.environ['CASAPATH']
import asap as sd                                 # import ASAP package into CASA    #GBTIDL
os.environ['CASAPATH'] = casapath
# IRC+10216 (HC3N line reduction only)
# Notes:
# scan numbers (zero-based) as compared to GBTIDL
# changes made to get to IRC+10216_rawACSmod
#  -- merge spectral windows with tolerance
s = sd.scantable('IRC+10216_rawACSmod', False)    # load the data without averaging  # filein,'IRC.raw.fits'
# Cannot find any matching Tcal at/near the data timestamp. Set Tcal=0.0
#s.summary()                                      # summary info                     # summary
#                                                                                    # fileout,'IRC+10216.reduced.fits'
s.set_fluxunit('K')                               # make 'K' default unit
#scal = sd.calnod(s, [236,237,238,239,248,249,250,251])   # Calibrate HC3N scans
#                                                 # for i=237,240,2 do begin getps,i,ifnum=0,plnum=0,units='Ta*',
scal = sd.calnod(s, [237,238,239,240,249,250,251,252])    # Calibrate HC3N scans
#                                                 # for i=237,240,2 do begin getps,i,ifnum=0,plnum=0,units='Ta*',
del s                                             # remove s from memory
# recalculate az/el (NOT needed for GBT data)
antennaname = scal.get_antennaname()
if antennaname != 'GBT':
    scal.recalc_azel()                            # recalculate az/el to
#                                                 # tau=0.09 & accum & getps, i, ifnum=0,plnum=1,units='Ta*',
scal.opacity(0.09)                                # do opacity correction            # tau=0.09 & accum & end & ave
sel = sd.selector()                               # Prepare a selection              # copy,0,9
sel.set_ifs(17)                                   # select HC3N IF
#                                                 # for i=250,252,2 do begin getps,i,ifnum=0,plnum=0,units='Ta*',

def initialize_scan(self):
    self.scan = sd.scantable(self.infile_abs, average=False)

def setupClass(self):
    self.plotter = asapplotter(False)
    st = scantable("data/MOPS.rpf", average=True)
    self.st = st.auto_quotient()

def execute(self):
    # insert varnames into expr
    varnames = self.varnames
    for i in range(len(self.infiles)):
        infile_key = 'IN' + str(i)
        varnames[infile_key] = self.infiles[i]
    for key in varnames.keys():
        regex = re.compile(key)
        if isinstance(varnames[key], str):
            self.expr = regex.sub('\"%s\"' % varnames[key], self.expr)
        else:
            self.expr = regex.sub("varnames['%s']" % key, self.expr)

    # default flux unit
    fluxunit_now = self.fluxunit

    # set filename list
    self.__parse()

    # selector
    sel = None

    # actual operation
    scanlist = {}
    for i in range(len(self.filenames)):
        skey = 's' + str(i)
        isfactor = None
        # file type check
        if os.path.isdir(self.filenames[i]):
            isfactor = False
        else:
            f = open(self.filenames[i])
            line = f.readline().rstrip('\n')
            f.close()
            del f
            try:
                isfactor = True
                vtmp = float(line[0])
                del line
            except ValueError, e:
                isfactor = False
                del line
        if isfactor:
            # variable
            scanlist[skey] = sdutil.read_factor_file(self.filenames[i])
        else:
            # scantable
            thisscan = sd.scantable(self.filenames[i], average=False,
                                    antenna=self.antenna)
            # selector
            if sel is None:
                sel = self.get_selector(thisscan)
            # Apply the selection
            thisscan.set_selection(sel)
            if fluxunit_now == '':
                fluxunit_now = thisscan.get_fluxunit()
            # copy scantable since convert_flux overwrites spectral data
            if self.is_disk_storage:
                casalog.post('copy data to keep original one')
                s = thisscan.copy()
            else:
                s = thisscan
            sdutil.set_fluxunit(s, self.fluxunit, self.telescopeparam, True)
            scanlist[skey] = s
        #regex = re.compile('[\',\"]')
        regex = re.compile('[\',\"]%s[\',\"]' % self.filenames[i])
        #expr = regex.sub('', expr)
        self.expr = regex.sub("scanlist['%s']" % skey, self.expr)

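# The loops above rewrite each IN<i> placeholder and each quoted filename in
# self.expr into dictionary lookups so the expression can later be evaluated
# against the loaded data. The self-contained sketch below shows the same
# re.sub technique on made-up names and plain numbers to illustrate how the
# string is transformed.
import re

expr = "IN0 * 2.0 + IN1"               # hypothetical input expression
values = {'IN0': 1.5, 'IN1': 0.25}     # hypothetical operands

# rewrite each placeholder into a dictionary lookup, as execute() does
for key in values:
    expr = re.compile(key).sub("values['%s']" % key, expr)

print(expr)        # values['IN0'] * 2.0 + values['IN1']
print(eval(expr))  # 3.25
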
# Scans: 21-24  Setup 1 HC3N et al
# Scans: 25-28  Setup 2 SiO et al
import asap as sd                               # import ASAP package into CASA     #GBTIDL
# Orion-S (CH3OH line reduction only)
# Notes:
# scan numbers (zero-based) as compared to GBTIDL
# changes made to get to OrionS_rawACSmod
# modifications to label sig/ref positions
os.environ['CASAPATH'] = casapath
s = sd.scantable('OrionS_rawACSmod', False)     # load the data without averaging   # filein,'Orion-S.raw.fits'
#s.summary()                                    # summary info                       # summary
#                                                                                    # fileout,'Orion-S-reduced.fits'
s.set_fluxunit('K')                             # make 'K' default unit
#scal = sd.calps(s, [20,21,22,23])              # Calibrate CH3OH scans              # for i=21,24,2 do begin getps,i,ifnum=2,plnum=0,units='Ta*',
scal = sd.calps(s, [21,22,23,24])               # Calibrate CH3OH scans              # for i=21,24,2 do begin getps,i,ifnum=2,plnum=0,units='Ta*',
del s                                           # remove s from memory
# recalculate az/el (NOT needed for GBT data)
antennaname = scal.get_antennaname()
if antennaname != 'GBT':
    scal.recalc_azel()                          # recalculate az/el to
#                                                                                    # tau=0.09 & accum & getps, i, ifnum=2,plnum=1,units='Ta*',
scal.opacity(0.09)                              # do opacity correction              # tau=0.09 & accum & end & ave
sel = sd.selector()                             # Prepare a selection
sel.set_ifs(2)                                  # select CH3OH IF
scal.set_selection(sel)                         # get this IF
