def delmod(vis=None, otf=None, field=None, scr=None):
    """Remove model visibility representations from the MS *vis*.

    Parameters:
      vis   -- path to the MeasurementSet
      otf   -- if True, remove the on-the-fly (virtual) model header data
      field -- field selection the removal is restricted to
      scr   -- if True, remove the MODEL_DATA scratch column

    On error a message is printed and the function returns normally.
    """
    casalog.origin('delmod')

    # Do the trivial parallelization: for a multi-MS, dispatch delmod to
    # every sub-MS and return.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('delmod', locals())
        helper.go()
        return

    try:
        # Only if vis exists... Use 'and' (not '&') so os.path.exists is
        # never evaluated for a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Open without adding anything! (no scratch/model columns)
            cb.open(vis, addcorr=False, addmodel=False)
            cb.delmod(otf=otf, field=field, scr=scr)
            cb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history
        ms.open(vis, nomodify=False)
        ms.writehistory(message='taskname = delmod', origin='delmod')
        ms.writehistory(message='vis = "' + str(vis) + '"', origin='delmod')
        ms.writehistory(message='otf = "' + str(otf) + '"', origin='delmod')
        ms.writehistory(message='scr = "' + str(scr) + '"', origin='delmod')
        ms.close()

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def clearcal(
    vis=None,
    field=None,
    spw=None,
    intent=None,
    addmodel=None,
    ):
    """Create and/or re-initialize the calibration scratch columns of *vis*.

    If CORRECTED_DATA already exists, the scratch columns are re-initialized
    for the given field/spw/intent selection; otherwise they are created
    (and any selection is ignored).
    """
    casalog.origin('clearcal')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('clearcal', locals())
        helper.go()
        return

    # Local versions of the tools
    tblocal = tbtool()
    cblocal = cbtool()
    mslocal = mstool()

    try:
        # We will initialize scr cols only if we don't create them.
        doinit = False
        # 'and' short-circuits so os.path.exists never sees a non-string vis.
        if isinstance(vis, str) and os.path.exists(vis):
            tblocal.open(vis)
            doinit = tblocal.colnames().count('CORRECTED_DATA') > 0
            tblocal.close()

            # We ignore selection if creating the scratch columns
            if not doinit:
                casalog.post(
                    'Need to create scratch columns; ignoring selection.')

            cblocal.setvi(old=True, quiet=False)  # Old VI for now
            cblocal.open(vis, addmodel=addmodel)
        else:
            raise Exception(
                'Visibility data set not found - please verify the name')

        # If necessary (scr col not just created), initialize scr cols
        if doinit:
            cblocal.selectvis(field=field, spw=spw, intent=intent)
            cblocal.initcalset(1)
        cblocal.close()

        # Write history to the MS: capture the task's own argument values
        # by name (standard CASA task pattern).
        param_names = clearcal.func_code.co_varnames[:clearcal.func_code.
                                                     co_argcount]
        param_vals = [eval(p) for p in param_names]
        casalog.post('Updating the history in the output', 'DEBUG1')
        write_history(mslocal, vis, 'clearcal', param_names, param_vals,
                      casalog)
    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def initweights(vis=None, wtmode=None, dowtsp=None):
    """Initialize the weight columns of the MS *vis*.

    Parameters:
      vis    -- path to the MeasurementSet
      wtmode -- weight initialization mode passed to the calibrater tool
      dowtsp -- if True, also initialize WEIGHT_SPECTRUM
    """
    casalog.origin('initweights')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('initweights', locals())
        helper.go()
        return

    try:
        mycb = cbtool()
        # Only if vis exists... 'and' short-circuits so os.path.exists is
        # never called with a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Open without adding anything! (no scratch/model columns)
            mycb.open(vis, compress=False, addcorr=False, addmodel=False)
            mycb.initweights(wtmode=wtmode, dowtsp=dowtsp)
            mycb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history.  str(wtmode) guards against a None wtmode, which
        # would otherwise raise TypeError on string concatenation.
        ms.open(vis, nomodify=False)
        ms.writehistory(message='taskname = initweights', origin='initweights')
        ms.writehistory(message='vis = "' + str(vis) + '"', origin='initweights')
        ms.writehistory(message='wtmode = "' + str(wtmode) + '"', origin='initweights')
        ms.writehistory(message='dowtsp = "' + str(dowtsp) + '"', origin='initweights')
        ms.close()

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def setjy(vis=None, field=None, spw=None, selectdata=None, timerange=None, scan=None, intent=None, observation=None, scalebychan=None, standard=None, model=None, modimage=None, listmodels=None, fluxdensity=None, spix=None, reffreq=None, polindex=None, polangle=None, rotmeas=None, fluxdict=None, useephemdir=None, interpolation=None, usescratch=None, ismms=None): """Fills the model column for flux density calibrators.""" casalog.origin('setjy') casalog.post("standard="+standard,'DEBUG1') mylocals = locals() if not listmodels: # listmmodels=T does not require vis sh = SetjyHelper(vis) rstat = sh.resetModelCol() # Take care of the trivial parallelization if ( not listmodels and ParallelTaskHelper.isParallelMS(vis) and usescratch): # jagonzal: We actually operate in parallel when usescratch=True because only # in this case there is a good trade-off between the parallelization overhead # and speed up due to the load involved with MODEL_DATA column creation # Create the default MODEL columns in all sub-MSs to avoid corruption of the MMS # when there are NULL MS selections # # TT: Use ismms is used to change behavior of some of the execption handling # for MMS case. It is a hidden task parameter only modified when input vis # is identified as MMS via SetjyHelper.resetModel(). 
#sh = SetjyHelper(vis) #rstat = sh.resetModelCol() if rstat: ismms=rstat mylocals['ismms']=ismms #print "mylocals now=",mylocals helper = ParallelTaskHelper('setjy', mylocals) helper._consolidateOutput = False #helper._consolidateOutput = True try: retval = helper.go() # Remove the subMS names from the returned dictionary #print "remove subms names ...retval=",retval if (any(isinstance(v,dict) for v in retval.itervalues())): for subMS in retval: dict_i = retval[subMS] if isinstance(dict_i,dict): retval = dict_i break else: casalog.post("Error in parallel processing of MMS",'SEVERE') retval = False except Exception, instance: retval = False else: casalog.post("Could not initialize MODEL columns in sub-MSs", 'SEVERE') retval = False
def clearcal(
    vis=None,
    field=None,
    spw=None,
    intent=None,
    addmodel=None,
    ):
    """Create and/or re-initialize the calibration scratch columns of *vis*.

    If CORRECTED_DATA already exists, the scratch columns are re-initialized
    for the given field/spw/intent selection; otherwise they are created
    (and any selection is ignored).
    """
    casalog.origin('clearcal')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('clearcal', locals())
        helper.go()
        return

    # Local versions of the tools
    tblocal = tbtool()
    cblocal = cbtool()
    mslocal = mstool()

    try:
        # We will initialize scr cols only if we don't create them.
        doinit = False
        # 'and' short-circuits so os.path.exists never sees a non-string vis.
        if isinstance(vis, str) and os.path.exists(vis):
            tblocal.open(vis)
            doinit = tblocal.colnames().count('CORRECTED_DATA') > 0
            tblocal.close()

            # We ignore selection if creating the scratch columns
            if not doinit:
                casalog.post('Need to create scratch columns; ignoring selection.'
                             )
            cblocal.open(vis, addmodel=addmodel)
        else:
            raise Exception(
                'Visibility data set not found - please verify the name')

        # If necessary (scr col not just created), initialize scr cols
        if doinit:
            cblocal.selectvis(field=field, spw=spw, intent=intent)
            cblocal.initcalset(1)
        cblocal.close()

        # Write history to the MS: capture the task's own argument values
        # by name (standard CASA task pattern).
        param_names = clearcal.func_code.co_varnames[:clearcal.func_code.co_argcount]
        param_vals = [eval(p) for p in param_names]
        casalog.post('Updating the history in the output', 'DEBUG1')
        write_history(mslocal, vis, 'clearcal', param_names, param_vals,
                      casalog)
    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def initweights(vis=None, wtmode=None, tsystable=None, gainfield=None,
                interp=None, spwmap=None, dowtsp=None):
    """Initialize the weight columns of the MS *vis*.

    Parameters:
      vis       -- path to the MeasurementSet
      wtmode    -- weight initialization mode; modes containing 'tsys'
                   require *tsystable*
      tsystable -- Tsys calibration table (Tsys modes only)
      gainfield -- field selection for the Tsys table
      interp    -- interpolation type ('' defaults to 'linear')
      spwmap    -- spw mapping for the Tsys table ([] defaults to [-1])
      dowtsp    -- if True, also initialize WEIGHT_SPECTRUM
    """
    casalog.origin('initweights')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('initweights', locals())
        helper.go()
        return

    try:
        mycb = cbtool()
        # Only if vis exists... 'and' short-circuits so os.path.exists is
        # never called with a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Tsys-based modes need a valid caltable and sensible defaults.
            if wtmode.upper().find("TSYS") > -1:
                if not os.path.exists(tsystable):
                    raise Exception('Tsys calibration table %s not found' % tsystable)
                if len(spwmap) == 0:
                    spwmap = [-1]
                if interp == "":
                    interp = "linear"
            # ... and we are asked to do something...
            # Open without adding anything! (no scratch/model columns)
            mycb.open(vis, compress=False, addcorr=False, addmodel=False)
            mycb.initweights(wtmode=wtmode, dowtsp=dowtsp,
                             tsystable=tsystable, gainfield=gainfield,
                             interp=interp, spwmap=spwmap)
            mycb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history.  str(wtmode) guards against a None wtmode, which
        # would otherwise raise TypeError on string concatenation.
        ms.open(vis, nomodify=False)
        ms.writehistory(message='taskname = initweights', origin='initweights')
        ms.writehistory(message='vis = "' + str(vis) + '"', origin='initweights')
        ms.writehistory(message='wtmode = "' + str(wtmode) + '"', origin='initweights')
        ms.writehistory(message='dowtsp = "' + str(dowtsp) + '"', origin='initweights')
        ms.close()

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def uvcontsub(vis, field, fitspw, excludechans, combine, solint, fitorder, spw, want_cont): if ParallelTaskHelper.isParallelMS(vis): helper = ParallelTaskHelper('uvcontsub', locals()) helper._consolidateOutput = False retVar = helper.go() # Gather the list of continuum subtraction-SubMSs cont_subMS_list = [] contsub_subMS_list = [] for subMS in retVar: if retVar[subMS]: cont_subMS_list.append(subMS + ".cont") contsub_subMS_list.append(subMS + ".contsub") if len(cont_subMS_list) <= 0: casalog.post("No continuum-subtracted sub-MSs for concatenation", "SEVERE") return False # We have to sort the list because otherwise it # depends on the time the engines dispatches their sub-MSs cont_subMS_list.sort() contsub_subMS_list.sort() # deal with the pointing table auxfile = "uvcontsub_aux2_" + str(time.time()) pnrows = 0 try: mytb.open(vis + '/POINTING') pnrows = mytb.nrows() mytb.close() if (pnrows > 0): shutil.copytree(os.path.realpath(vis + '/POINTING'), auxfile) except Exception, instance: casalog.post( "Error handling POINTING table %s: %s" % (vis + '/POINTING', str(instance)), 'SEVERE') if want_cont: try: virtualconcat(concatvis=helper._arg['vis'] + ".cont", vis=cont_subMS_list, copypointing=False) except Exception, instance: casalog.post( "Error concatenating continuum sub-MSs %s: %s" % (str(cont_subMS_list), str(instance)), 'SEVERE')
def test_split_MMS_weight_corr_sel(self): '''mstransform: Split MMS in parallel. Check WEIGHT shape when selecting correlation''' # Create an MMS in the setup. It creates self.testmms self.createMMS(self.vis, axis='scan', spws='0,1') self.outputms = 'corrRR_LL.mms' mstransform(vis=self.testmms, outputvis=self.outputms, datacolumn='data', correlation='RR,LL',spw='0') self.assertTrue(ParallelTaskHelper.isParallelMS(self.outputms),'Output is not an MMS') mslocal = mstool() mslocal.open(self.outputms) sublist = mslocal.getreferencedtables() self.assertEqual(len(sublist), 2) # Test DD table msmdt = msmdtool() msmdt.open(self.outputms) out_dds = msmdt.datadescids() msmdt.done() ref = [0] for i in out_dds: self.assertEqual(out_dds[i], ref[i]) # The separation axis should be copied to the output MMS in_sepaxis = ph.axisType(self.testmms) out_sepaxis = ph.axisType(self.outputms) self.assertEqual(in_sepaxis, out_sepaxis, 'AxisTypes from input and output MMS do not match') # Check the dimensions of the WEIGHT and SIGMA columns. CAS-6946 out_ws = th.getColShape(self.outputms,'WEIGHT') out_ss = th.getColShape(self.outputms,'SIGMA') self.assertEqual(out_ws[0],'[2]','WEIGHT shape is not correct') self.assertEqual(out_ss[0],'[2]','SIGMA shape is not correct')
def test_split_MMS(self):
    '''mstransform: Split MMS in parallel'''
    # The setup helper builds an MMS and records its name in self.testmms.
    self.createMMS(self.vis, axis='scan', spws='0,1')
    self.outputms = 'scan30.mms'

    mstransform(vis=self.testmms, outputvis=self.outputms,
                datacolumn='data', scan='30')

    # The result of splitting an MMS must itself be an MMS.
    self.assertTrue(ParallelTaskHelper.isParallelMS(self.outputms),
                    'Output is not an MMS')

    ms_local = mstool()
    ms_local.open(self.outputms)
    referenced = ms_local.getreferencedtables()
    self.assertEqual(len(referenced), 1)

    # Verify the data description ids of the output.
    md_local = msmdtool()
    md_local.open(self.outputms)
    dd_ids = md_local.datadescids()
    md_local.done()

    expected = [0, 1]
    for dd in dd_ids:
        self.assertEqual(dd_ids[dd], expected[dd])

    # The separation axis should be copied to the output MMS.
    axis_in = ph.axisType(self.testmms)
    axis_out = ph.axisType(self.outputms)
    self.assertEqual(axis_in, axis_out,
                     'AxisTypes from input and output MMS do not match')
def test7_bypassParallelProcessing(self):
    """Test 7: Bypass Parallel Processing mode """
    simple_cluster.setDefaults(default_mem_per_engine=33554432)

    # Prepare MMS
    self.setUpFile("Four_ants_3C286.mms",'vis')

    # Create list file
    text = "mode='unflag'\n"\
           "mode='clip' clipminmax=[0,0.1]"
    filename = 'list_flagdata.txt'
    self.create_input(text, filename)

    # step 1: Do unflag+clip
    flagdata(vis=self.vis, mode='list', inpfile=filename)

    # step 2: Now do summary
    ret_dict = flagdata(vis=self.vis, mode='summary')

    # Check summary: expected per-spw flagged counts, asserted in the
    # same order as the original hand-written checks.
    self.assertTrue(ret_dict['name']=='Summary')
    expected_flagged = [
        ('15', 96284.0), ('0', 129711.0), ('1', 128551.0),
        ('2', 125686.0), ('3', 122862.0), ('4', 109317.0),
        ('5', 24481.0), ('6', 0), ('7', 0), ('8', 0),
        ('9', 27422.0), ('10', 124638.0), ('11', 137813.0),
        ('12', 131896.0), ('13', 125074.0), ('14', 118039.0),
    ]
    for spw_id, count in expected_flagged:
        self.assertTrue(ret_dict['spw'][spw_id]['flagged'] == count)

    # Remove MMS
    os.system('rm -rf ' + self.vis)

    # Restore default values
    simple_cluster.setDefaults(default_mem_per_engine=512)
    if not self.bypassParallelProcessing:
        ParallelTaskHelper.bypassParallelProcessing(0)
def uvcontsub(vis, field, fitspw, excludechans, combine, solint, fitorder, spw, want_cont): if ParallelTaskHelper.isParallelMS(vis): helper = ParallelTaskHelper('uvcontsub', locals()) helper._consolidateOutput = False retVar = helper.go() # Gather the list of continuum subtraction-SubMSs cont_subMS_list = [] contsub_subMS_list = [] for subMS in retVar: if retVar[subMS]: cont_subMS_list.append(subMS + ".cont") contsub_subMS_list.append(subMS + ".contsub") if len(cont_subMS_list) <= 0: casalog.post("No continuum-subtracted sub-MSs for concatenation","SEVERE") return False # We have to sort the list because otherwise it # depends on the time the engines dispatches their sub-MSs cont_subMS_list.sort() contsub_subMS_list.sort() # deal with the pointing table auxfile = "uvcontsub_aux2_"+str(time.time()) pnrows = 0 try: mytb.open(vis+'/POINTING') pnrows = mytb.nrows() mytb.close() if(pnrows>0): shutil.copytree(os.path.realpath(vis+'/POINTING'), auxfile) except Exception, instance: casalog.post("Error handling POINTING table %s: %s" % (vis+'/POINTING',str(instance)),'SEVERE') if want_cont: try: virtualconcat(concatvis=helper._arg['vis'] + ".cont",vis=cont_subMS_list, copypointing=False) except Exception, instance: casalog.post("Error concatenating continuum sub-MSs %s: %s" % (str(cont_subMS_list),str(instance)),'SEVERE')
def initweights(vis=None, wtmode=None, tsystable=None, gainfield=None,
                interp=None, spwmap=None, dowtsp=None):
    """Initialize the weight columns of the MS *vis*.

    Parameters:
      vis       -- path to the MeasurementSet
      wtmode    -- weight initialization mode; modes containing 'tsys'
                   require *tsystable*
      tsystable -- Tsys calibration table (Tsys modes only)
      gainfield -- field selection for the Tsys table
      interp    -- interpolation type ('' defaults to 'linear')
      spwmap    -- spw mapping for the Tsys table ([] defaults to [-1])
      dowtsp    -- if True, also initialize WEIGHT_SPECTRUM
    """
    casalog.origin('initweights')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('initweights', locals())
        helper.go()
        return

    try:
        mycb = cbtool()
        # Only if vis exists... 'and' short-circuits so os.path.exists is
        # never called with a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Tsys-based modes need a valid caltable and sensible defaults.
            if wtmode.upper().find("TSYS") > -1:
                if not os.path.exists(tsystable):
                    raise Exception('Tsys calibration table %s not found' % tsystable)
                if len(spwmap) == 0:
                    spwmap = [-1]
                if interp == "":
                    interp = "linear"
            # ... and we are asked to do something...
            # Open without adding anything! (no scratch/model columns)
            mycb.open(vis, compress=False, addcorr=False, addmodel=False)
            mycb.initweights(wtmode=wtmode, dowtsp=dowtsp,
                             tsystable=tsystable, gainfield=gainfield,
                             interp=interp, spwmap=spwmap)
            mycb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history.  str(wtmode) guards against a None wtmode, which
        # would otherwise raise TypeError on string concatenation.
        ms.open(vis, nomodify=False)
        ms.writehistory(message='taskname = initweights', origin='initweights')
        ms.writehistory(message='vis = "' + str(vis) + '"', origin='initweights')
        ms.writehistory(message='wtmode = "' + str(wtmode) + '"', origin='initweights')
        ms.writehistory(message='dowtsp = "' + str(dowtsp) + '"', origin='initweights')
        ms.close()

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def initweights(vis=None, wtmode=None, dowtsp=None):
    """Initialize the weight columns of the MS *vis*.

    Parameters:
      vis    -- path to the MeasurementSet
      wtmode -- weight initialization mode passed to the calibrater tool
      dowtsp -- if True, also initialize WEIGHT_SPECTRUM
    """
    casalog.origin('initweights')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('initweights', locals())
        helper.go()
        return

    try:
        mycb = cbtool()
        # Only if vis exists... 'and' short-circuits so os.path.exists is
        # never called with a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Open without adding anything! (no scratch/model columns)
            mycb.open(vis, compress=False, addcorr=False, addmodel=False)
            mycb.initweights(wtmode=wtmode, dowtsp=dowtsp)
            mycb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history.  str(wtmode) guards against a None wtmode, which
        # would otherwise raise TypeError on string concatenation.
        ms.open(vis, nomodify=False)
        ms.writehistory(message='taskname = initweights', origin='initweights')
        ms.writehistory(message='vis = "' + str(vis) + '"', origin='initweights')
        ms.writehistory(message='wtmode = "' + str(wtmode) + '"', origin='initweights')
        ms.writehistory(message='dowtsp = "' + str(dowtsp) + '"', origin='initweights')
        ms.close()

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def delmod(vis=None, otf=None, field=None, scr=None):
    """Remove model visibility representations from the MS *vis*.

    Parameters:
      vis   -- path to the MeasurementSet
      otf   -- if True, remove the on-the-fly (virtual) model header data
      field -- field selection the removal is restricted to
      scr   -- if True, remove the MODEL_DATA scratch column
    """
    casalog.origin('delmod')

    # Do the trivial parallelization: dispatch to the sub-MSs of an MMS.
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('delmod', locals())
        helper.go()
        return

    try:
        # Only if vis exists... 'and' short-circuits so os.path.exists is
        # never called with a non-string vis such as the default None.
        if isinstance(vis, str) and os.path.exists(vis):
            # Open without adding anything! (no scratch/model columns)
            _cb.open(vis, addcorr=False, addmodel=False)
            _cb.delmod(otf=otf, field=field, scr=scr)
            _cb.close()
        else:
            raise Exception('Visibility data set not found - please verify the name')

        # Write history to MS; a failure here is only a warning, the
        # model removal itself already succeeded.
        try:
            param_names = delmod.func_code.co_varnames[:delmod.func_code.
                                                       co_argcount]
            param_vals = [eval(p) for p in param_names]
            write_history(mstool(), vis, 'delmod', param_names,
                          param_vals, casalog)
        except Exception as instance:
            casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                         'WARN')

    except Exception as instance:
        print('*** Error *** %s' % (instance,))
def test_mpi4casa_flagdata_list_return_multithreading(self): """Test flagdata summary in multithreading mode""" # First run flagdata sequentially bypassParallelProcessing = ParallelTaskHelper.getBypassParallelProcessing() ParallelTaskHelper.bypassParallelProcessing(2) res = flagdata(vis=self.vis, mode='summary') ParallelTaskHelper.bypassParallelProcessing(bypassParallelProcessing) # Make a copy of the input MMS for each flagdata instance os.system("cp -r %s %s" % (self.vis,self.vis2)) os.system("cp -r %s %s" % (self.vis,self.vis3)) ParallelTaskHelper.setMultithreadingMode(True) # Set up workers cmd1 = "flagdata(vis='%s', mode='summary')" % (self.vis) worker1 = ParallelTaskWorker(cmd1) cmd2 = "flagdata(vis='%s', mode='summary')" % (self.vis2) worker2 = ParallelTaskWorker(cmd2) cmd3 = "flagdata(vis='%s', mode='summary')" % (self.vis3) worker3 = ParallelTaskWorker(cmd3) # Spawn worker threads worker1.start() worker2.start() worker3.start() # Get resulting summary ict from each worker res1 = worker1.getResult() res2 = worker2.getResult() res3 = worker3.getResult() ParallelTaskHelper.setMultithreadingMode(False) # Compare return summary dicts with the one generated with a sequential run self.assertEqual(res1,res, "flagdata dictionary does not match for the first flagdata run") self.assertEqual(res2,res, "flagdata dictionary does not match for the second flagdata run") self.assertEqual(res3,res, "flagdata dictionary does not match for the third flagdata run")
def _createCalMSCommand(self):
    '''Build and queue the sub-command that generates a MS holding only
    the subMSs matching the selected calibration.'''
    # Restrict the selection to the calibration data and remember the
    # scans it covers.
    self._selectMS(True)
    self._calScanList = self._getScanList()

    # Start from a shallow copy of the task arguments and override the
    # fields that differ for the calibration MS command.
    cal_args = copy.copy(self._arg)
    cal_args['createmms'] = False
    cal_args['calmsselection'] = 'none'
    cal_args['scan'] = \
        ParallelTaskHelper.listToCasaString(self._calScanList)
    if self._arg['createmms']:
        cal_args['outputvis'] = self.dataDir + '/%s.cal.ms' % self.outputBase
    else:
        cal_args['outputvis'] = self._arg['calmsname']

    self._executionList.append(JobData(self._taskName, cal_args))
def initweights(vis=None,wtmode=None,tsystable=None,gainfield=None,interp=None,spwmap=None,dowtsp=None): casalog.origin('initweights') # Do the trivial parallelization if ParallelTaskHelper.isMPIEnabled() and ParallelTaskHelper.isParallelMS(vis): tsystable = ParallelTaskHelper.findAbsPath(tsystable) helper = ParallelTaskHelper('initweights', locals()) helper.go() # Write history to MS. try: param_names = initweights.func_code.co_varnames[:initweights.func_code.co_argcount] param_vals = [eval(p) for p in param_names] casalog.post('Updating the history in the output', 'DEBUG1') write_history(ms, vis, 'initweights', param_names, param_vals, casalog) except Exception, instance: casalog.post("*** Error \'%s\' updating HISTORY" % (instance), 'WARN') return
imt = imtool() imt.open(vis, usescratch=False) imt.calcuvw(fldids, refcode='J2000', reuse=False) imt.close() imt = None if((oldrefcol!=[]) and (thenewref>0)): tbt.open(vis+'/FIELD', nomodify=False) tbt.putcol('PhaseDir_Ref', oldrefcol) tbt.close() else: casalog.post("UVW coordinates not changed.", 'NORMAL') if (ParallelTaskHelper.isParallelMS(vis)): casalog.post("Tidying up the MMS subtables ...", 'NORMAL') ParallelTaskHelper.restoreSubtableAgreement(vis) mst = None tbt = None return True except Exception, instance: mst = None tbt = None imt = None casalog.post("*** Error \'%s\' " % (instance), 'SEVERE') return False
def __init__(self, args=None):
    """Set up the oldpartition parallel-task helper.

    args -- dict of task arguments (defaults to an empty dict).  A None
            sentinel replaces the original mutable {} default so that
            separate instances can never share and mutate one dict.
    """
    if args is None:
        args = {}
    ParallelTaskHelper.__init__(self, 'oldpartition', args)
    # Caches filled in later by the selection / tool helpers.
    self._calScanList = None
    self._selectionScanList = None
    self._msTool = None
    self._tbTool = None
def initialize(self):
    '''
    This method will prepare the system for working, in particular:
       * Open the input MS as a msTool
       * Create the Output Directory and the Data subdirectory
       * Populate the global data selection filter (if necessary)
    '''
    ParallelTaskHelper.initialize(self)
    # Normalize unset (None) arguments to empty strings.
    for (arg, value) in self._arg.items():
        if value == None:
            self._arg[arg] = ''

    # Since we are here we know that the outputvis should not be ''
    if self._arg['outputvis'] == '':
        raise ValueError, "An output vis was required."
    if os.path.exists(self._arg['outputvis']):
        raise ValueError, \
            "Vis directory for output (%s) already exists" %\
            self._arg['outputvis']

    # Find the absolute path to the outputvis
    self._arg['outputvis'] = os.path.abspath(self._arg['outputvis'])
    outputPath, self.outputBase = os.path.split(self._arg['outputvis'])
    try:
        # Strip the extension to get the base name for derived paths.
        self.outputBase = self.outputBase[:self.outputBase.rindex('.')]
    except ValueError:
        # outputBase must not have a trailing .
        pass

    self.dataDir = outputPath + '/' + self.outputBase + '.data'
    # NOTE(review): fragment reconstructed from a collapsed line; the
    # POINTING/SYSCAL handling below appears to be nested inside the
    # createmms branch -- confirm against the original file layout.
    if self._arg['createmms']:
        if os.path.exists(self.dataDir):
            shutil.rmtree(self.dataDir)
        os.mkdir(self.dataDir)

        ## handle the POINTING and SYSCAL tables ##
        self.ptab = self._arg['vis'] + '/POINTING'
        self.stab = self._arg['vis'] + '/SYSCAL'
        mytb = tbtool()

        # test their properties
        self.pointingisempty = True
        self.makepointinglinks = False
        self.pwriteaccess = True
        mytb.open(self.ptab)
        self.pointingisempty = (mytb.nrows() == 0)
        mytb.close()
        self.makepointinglinks = not self.pointingisempty
        self.pwriteaccess = True

        self.syscalisempty = True
        self.makesyscallinks = False
        self.swriteaccess = True
        if (os.path.exists(self.stab)):
            # syscal is optional
            mytb.open(self.stab)
            self.syscalisempty = (mytb.nrows() == 0)
            mytb.close()
            self.makesyscallinks = not self.syscalisempty

        if not self.pointingisempty:
            # Only relocate POINTING when its parent dir is writable and
            # the table is not already a symlink.
            if os.access(os.path.dirname(self.ptab), os.W_OK) \
               and not os.path.islink(self.ptab):
                # move to datadir
                os.system('mv ' + self.ptab + ' ' + self.dataDir)
                # create empty copy in original place so partition does not
                # need to deal with it
                mytb.open(self.dataDir + '/POINTING')
                tmpp = mytb.copy(newtablename=self.ptab, norows=True)
                tmpp.close()
                mytb.close()
            else:
                self.pwriteaccess = False

        if not self.syscalisempty:
            # Same relocation logic for the optional SYSCAL table.
            if os.access(os.path.dirname(self.stab), os.W_OK) \
               and not os.path.islink(self.stab):
                # move to datadir
                os.system('mv ' + self.stab + ' ' + self.dataDir)
                # create empty copy in original place so partition does not need to deal with it
                mytb.open(self.dataDir + '/SYSCAL')
                tmpp = mytb.copy(newtablename=self.stab, norows=True)
                tmpp.close()
                mytb.close()
            else:
                self.swriteaccess = False
def flagdata( vis, mode, autocorr, # mode manual parameter inpfile, # mode list parameters reason, tbuff, spw, # data selection parameters field, antenna, uvrange, timerange, correlation, scan, intent, array, observation, feed, clipminmax, # mode clip parameters datacolumn, clipoutside, channelavg, timeavg, timebin, clipzeros, quackinterval, # mode quack parameters quackmode, quackincrement, tolerance, # mode shadow parameter addantenna, lowerlimit, # mode elevation parameters upperlimit, ntime, # mode tfcrop combinescans, timecutoff, freqcutoff, timefit, freqfit, maxnpieces, flagdimension, usewindowstats, halfwin, extendflags, winsize, # rflag parameters timedev, freqdev, timedevscale, freqdevscale, spectralmax, spectralmin, extendpols, # mode extend growtime, growfreq, growaround, flagneartime, flagnearfreq, minrel, # mode summary maxrel, minabs, maxabs, spwchan, spwcorr, basecnt, fieldcnt, name, action, # run or not the tool display, flagbackup, savepars, # save the current parameters to FLAG_CMD or to a file cmdreason, # reason to save to flag cmd outfile, # output file to save flag commands writeflags # HIDDEN parameter ): # # Task flagdata # Flags data from an MS or calibration table based on data selection in various ways casalog.origin('flagdata') if (action == 'none' or action == '' or action == 'calculate'): flagbackup = False # SMC: moved the flagbackup to before initializing the cluster. # Note that with this change, a flag backup will be created even if # an error happens that prevents the flagger tool from running. 
if (mode != 'summary' and flagbackup): casalog.post('Backup original flags before applying new flags') fh.backupFlags(aflocal=None, msfile=vis, prename='flagdata') # Set flagbackup to False because only the controller # should create a backup flagbackup = False # Initialize the helper class orig_locals = locals() FHelper = FlagHelper() # Check if vis is a MS, MMS or cal table: # typevis = 1 --> cal table # typevis = 0 --> MS # typevis = 2 --> MMS iscal = False typevis = fh.isCalTable(vis) if typevis == 1: iscal = True # ***************** Input is MMS -- Parallel Processing *********************** if typevis == 2 and action != '' and action != 'none': # Create a temporary input file with .tmp extension. # Use this file for all the processing from now on. if (isinstance(inpfile,str) and inpfile != '') or \ (isinstance(inpfile, list) and os.path.isfile(inpfile[0])): inpfile = FHelper.setupInputFile(inpfile) if inpfile != None: orig_locals['inpfile'] = inpfile if outfile != '': outfile = os.path.abspath(outfile) orig_locals['outfile'] = outfile if isinstance(addantenna, str) and addantenna != '': addantenna = os.path.abspath(addantenna) orig_locals['addantenna'] = addantenna if isinstance(timedev, str) and timedev != '': timedev = os.path.abspath(timedev) orig_locals['timedev'] = timedev if isinstance(freqdev, str) and freqdev != '': freqdev = os.path.abspath(freqdev) orig_locals['freqdev'] = freqdev FHelper.__init__(orig_locals) # For tests only # FHelper.bypassParallelProcessing(1) FHelper.setupCluster('flagdata') # (CAS-4119): Override summary minabs,maxabs,minrel,maxrel # so that it is done after consolidating the summaries # By-pass options to filter summary filterSummary = False if ((mode == 'summary') and ((minrel != 0.0) or (maxrel != 1.0) or (minabs != 0) or (maxabs != -1))): filterSummary = True myms = mstool() myms.open(vis) subMS_list = myms.getreferencedtables() myms.close() if (minrel != 0.0): minreal_dict = create_arg_dict(subMS_list, 0.0) 
FHelper.override_arg('minrel', minreal_dict) if (maxrel != 1.0): maxrel_dict = create_arg_dict(subMS_list, 1.0) FHelper.override_arg('maxrel', maxrel_dict) if (minabs != 0): minabs_dict = create_arg_dict(subMS_list, 0) FHelper.override_arg('minabs', minabs_dict) if (maxabs != -1): maxabs_dict = create_arg_dict(subMS_list, -1) FHelper.override_arg('maxabs', maxabs_dict) # By-pass options to filter summary if savepars: myms = mstool() myms.open(vis) subMS_list = myms.getreferencedtables() myms.close() savepars_dict = create_arg_dict(subMS_list, False) FHelper.override_arg('savepars', savepars_dict) # Execute the parallel engines retVar = FHelper.go() # In async mode return the job ids if ParallelTaskHelper.getAsyncMode(): return retVar else: # Filter summary at MMS level if (mode == 'summary'): if filterSummary: retVar = filter_summary(retVar, minrel, maxrel, minabs, maxabs) return retVar # Save parameters at MMS level elif savepars: action = 'none' else: return retVar summary_stats = {} # if pCASA.is_mms(vis): # pCASA.execute("flagdata", orig_locals) # return # ***************** Input is a normal MS/cal table **************** # Create local tools aflocal = casac.agentflagger() mslocal = mstool() try: # Verify the ntime value newtime = 0.0 if type(ntime) == float or type(ntime) == int: if ntime <= 0: raise Exception, 'Parameter ntime cannot be < = 0' else: # units are seconds newtime = float(ntime) elif type(ntime) == str: if ntime == 'scan': # iteration time step is a scan newtime = 0.0 else: # read the units from the string qtime = qa.quantity(ntime) if qtime['unit'] == 'min': # convert to seconds qtime = qa.convert(qtime, 's') elif qtime['unit'] == '': qtime['unit'] = 's' # check units if qtime['unit'] == 's': newtime = qtime['value'] else: casalog.post( 'Cannot convert units of ntime. 
Will use default 0.0s', 'WARN') casalog.post( "New ntime is of type %s and value %s" % (type(newtime), newtime), 'DEBUG') # Open the MS and attach it to the tool if ((type(vis) == str) & (os.path.exists(vis))): aflocal.open(vis, newtime) else: raise Exception, 'Visibility data set not found - please verify the name' # Get the parameters for the mode agent_pars = {} # By default, write flags to the MS writeflags = True # Only the apply action writes to the MS # action=apply --> write to the MS # action=calculate --> do not write to the MS # action='' --> do not run the tool and do not write to the MS if action != 'apply': writeflags = False # Default mode if mode == '' or mode == 'manualflag': mode = 'manual' # Read in the list of commands # Make a dictionary of the input commands. Select by reason if requested flagcmd = {} if mode == 'list': casalog.post('List mode is active') doPadding = True try: # If tbuff is requested, read and Parse if tbuff == 0.0 or tbuff == [] or tbuff == None: doPadding = False if doPadding: casalog.post('Will apply time buffer padding') # inpfile is a file if isinstance(inpfile, str): inpfile = [inpfile] # read in the list and do a simple parsing to apply tbuff flaglist = fh.readAndParse(inpfile, tbuff) else: # inpfile is a file if isinstance(inpfile, str) and os.path.isfile(inpfile): flaglist = fh.readFile(inpfile) nlines = len(flaglist) casalog.post('Read %s command(s) from file: %s' % (nlines, inpfile)) # inpfile is a list of files elif isinstance(inpfile, list) and os.path.isfile( inpfile[0]): flaglist = fh.readFiles(inpfile) # Python list of strings elif isinstance(inpfile, list): flaglist = inpfile else: raise Exception, 'Unsupported input list of flag commands or input file does not exist' # Parse and create a dictionary flagcmd = fh.parseDictionary(flaglist, reason) # Validate the dictionary. # IMPORTANT: if any parameter changes its type, the following # function needs to be updated. 
The same if any new parameter is # added or removed from the task fh.evaluateFlagParameters(flagcmd, orig_locals) # List of flag commands in dictionary vrows = flagcmd.keys() casalog.post('%s' % flagcmd, 'DEBUG1') except Exception, instance: casalog.post('%s' % instance, 'ERROR') raise Exception, 'Error reading the input list. Make sure the syntax used in the list '\ 'follows the rules given in the inline help of the task.' casalog.post('Selected ' + str(vrows.__len__()) + ' commands from combined input list(s) ') elif mode == 'manual': agent_pars['autocorr'] = autocorr casalog.post('Manual mode is active')
def setupCluster(self, thistask=''):
    '''Get a simple_cluster to execute this task.

    An empty task name selects the default task, 'flagdata'.
    '''
    taskname = 'flagdata' if thistask == '' else thistask
    ParallelTaskHelper.__init__(self, task_name=taskname, args=self.__args)
def flagdata(vis, mode, autocorr, # mode manual parameter inpfile, # mode list parameters reason, tbuff, spw, # data selection parameters field, antenna, uvrange, timerange, correlation, scan, intent, array, observation, feed, clipminmax, # mode clip parameters datacolumn, clipoutside, channelavg, chanbin, timeavg, timebin, clipzeros, quackinterval, # mode quack parameters quackmode, quackincrement, tolerance, # mode shadow parameter addantenna, lowerlimit, # mode elevation parameters upperlimit, ntime, # mode tfcrop combinescans, timecutoff, freqcutoff, timefit, freqfit, maxnpieces, flagdimension, usewindowstats, halfwin, extendflags, winsize, # rflag parameters timedev, freqdev, timedevscale, freqdevscale, spectralmax, spectralmin, extendpols, # mode extend growtime, growfreq, growaround, flagneartime, flagnearfreq, minrel, # mode summary maxrel, minabs, maxabs, spwchan, spwcorr, basecnt, fieldcnt, name, action, # run or not the tool display, flagbackup, savepars, # save the current parameters to FLAG_CMD or to a file cmdreason, # reason to save to flag cmd outfile, # output file to save flag commands overwrite, # overwrite the outfile file writeflags # HIDDEN parameter ): # # Task flagdata # Flags data from an MS or calibration table based on data selection in various ways casalog.origin('flagdata') if (action == 'none' or action=='' or action=='calculate'): flagbackup = False # SMC: moved the flagbackup to before initializing the cluster. # Note that with this change, a flag backup will be created even if # an error happens that prevents the flagger tool from running. 
if (mode != 'summary' and flagbackup): casalog.post('Backup original flags before applying new flags') fh.backupFlags(aflocal=None, msfile=vis, prename='flagdata') # Set flagbackup to False because only the controller # should create a backup flagbackup = False # Initialize the helper class orig_locals = copy.deepcopy(locals()) FHelper = FlagHelper() # Check if vis is a MS, MMS or cal table: # typevis = 1 --> cal table # typevis = 0 --> MS # typevis = 2 --> MMS iscal = False typevis = fh.isCalTable(vis) if typevis == 1: iscal = True # ***************** Input is MMS -- Parallel Processing *********************** if FHelper.isMPIEnabled() and typevis == 2 and action != '' and action != 'none': # Create a temporary input file with .tmp extension. # Use this file for all the processing from now on. if (isinstance(inpfile,str) and inpfile != '') or \ (isinstance(inpfile, list) and os.path.isfile(inpfile[0])): inpfile = FHelper.setupInputFile(inpfile) if inpfile != None: orig_locals['inpfile'] = inpfile if outfile != '': outfile = os.path.abspath(outfile) orig_locals['outfile'] = outfile if isinstance(addantenna, str) and addantenna != '': addantenna = os.path.abspath(addantenna) orig_locals['addantenna'] = addantenna if isinstance(timedev, str) and timedev != '': timedev = os.path.abspath(timedev) orig_locals['timedev'] = timedev if isinstance(freqdev, str) and freqdev != '': freqdev = os.path.abspath(freqdev) orig_locals['freqdev'] = freqdev FHelper.__init__(orig_locals) # For tests only # FHelper.bypassParallelProcessing(1) FHelper.setupCluster('flagdata') # (CAS-4119): Override summary minabs,maxabs,minrel,maxrel # so that it is done after consolidating the summaries # By-pass options to filter summary filterSummary = False if ((mode == 'summary') and ((minrel != 0.0) or (maxrel != 1.0) or (minabs != 0) or (maxabs != -1))): filterSummary = True myms = mstool() myms.open(vis) subMS_list = myms.getreferencedtables() myms.close() if (minrel != 0.0): minreal_dict = 
create_arg_dict(subMS_list,0.0) FHelper.override_arg('minrel',minreal_dict) if (maxrel != 1.0): maxrel_dict = create_arg_dict(subMS_list,1.0) FHelper.override_arg('maxrel',maxrel_dict) if (minabs != 0): minabs_dict = create_arg_dict(subMS_list,0) FHelper.override_arg('minabs',minabs_dict) if (maxabs != -1): maxabs_dict = create_arg_dict(subMS_list,-1) FHelper.override_arg('maxabs',maxabs_dict) # By-pass options to filter summary if savepars: myms = mstool() myms.open(vis) subMS_list = myms.getreferencedtables() myms.close() savepars_dict = create_arg_dict(subMS_list,False) FHelper.override_arg('savepars',savepars_dict) # Execute the parallel engines retVar = FHelper.go() # In async mode return the job ids if ParallelTaskHelper.getAsyncMode(): return retVar else: # Filter summary at MMS level if (mode == 'summary'): if filterSummary: retVar = filter_summary(retVar,minrel,maxrel,minabs,maxabs) return retVar # Save parameters at MMS level elif savepars: action = 'none' else: return retVar summary_stats={}; # ***************** Input is a normal MS/cal table **************** # Create local tools aflocal = casac.agentflagger() mslocal = mstool() try: # Verify the ntime value newtime = 0.0 if type(ntime) == float or type(ntime) == int: if ntime <= 0: raise Exception, 'Parameter ntime cannot be < = 0' else: # units are seconds newtime = float(ntime) elif type(ntime) == str: if ntime == 'scan': # iteration time step is a scan newtime = 0.0 else: # read the units from the string qtime = qa.quantity(ntime) if qtime['unit'] == 'min': # convert to seconds qtime = qa.convert(qtime, 's') elif qtime['unit'] == '': qtime['unit'] = 's' # check units if qtime['unit'] == 's': newtime = qtime['value'] else: casalog.post('Cannot convert units of ntime. 
Will use default 0.0s', 'WARN') casalog.post("New ntime is of type %s and value %s"%(type(newtime),newtime), 'DEBUG') # Open the MS and attach it to the tool if ((type(vis) == str) & (os.path.exists(vis))): aflocal.open(vis, newtime) else: raise Exception, 'Visibility data set not found - please verify the name' # Get the parameters for the mode agent_pars = {} # By default, write flags to the MS writeflags = True # Only the apply action writes to the MS # action=apply --> write to the MS # action=calculate --> do not write to the MS # action='' --> do not run the tool and do not write to the MS if action != 'apply': writeflags = False # Default mode if mode == '' or mode == 'manualflag': mode = 'manual' # Read in the list of commands # Make a dictionary of the input commands. Select by reason if requested flagcmd = {} if mode == 'list': casalog.post('List mode is active') doPadding = True try: # If tbuff is requested, read and Parse if tbuff == 0.0 or tbuff == [] or tbuff == None: doPadding = False if doPadding: casalog.post('Will apply time buffer padding') # inpfile is a file if isinstance(inpfile, str): inpfile = [inpfile] # read in the list and do a simple parsing to apply tbuff flaglist = fh.readAndParse(inpfile, tbuff) else: # inpfile is a file if isinstance(inpfile, str) and os.path.isfile(inpfile): flaglist = fh.readFile(inpfile) nlines = len(flaglist) casalog.post('Read %s command(s) from file: %s'%(nlines, inpfile)) # inpfile is a list of files elif isinstance(inpfile, list) and os.path.isfile(inpfile[0]): flaglist = fh.readFiles(inpfile) # Python list of strings elif isinstance(inpfile, list): flaglist = inpfile else: raise Exception, 'Unsupported input list of flag commands or input file does not exist' # Parse and create a dictionary flagcmd = fh.parseDictionary(flaglist, reason) # Validate the dictionary. # IMPORTANT: if any parameter changes its type, the following # function needs to be updated. 
The same if any new parameter is # added or removed from the task fh.evaluateFlagParameters(flagcmd,orig_locals) # List of flag commands in dictionary vrows = flagcmd.keys() casalog.post('%s'%flagcmd,'DEBUG1') except Exception, instance: casalog.post('%s'%instance,'ERROR') raise Exception, 'Error reading the input list. Make sure the syntax used in the list '\ 'follows the rules given in the inline help of the task.' casalog.post('Selected ' + str(vrows.__len__()) + ' commands from combined input list(s) ') elif mode == 'manual': agent_pars['autocorr'] = autocorr casalog.post('Manual mode is active')
def setupCluster(self, thistask=''):
    '''Get a cluster to execute this task.

    Falls back to the 'flagdata' task when no name is supplied.
    '''
    if thistask == '':
        taskname = 'flagdata'
    else:
        taskname = thistask
    ParallelTaskHelper.__init__(self, task_name=taskname, args=self.__args)
4. It gets the right answer for a known line + 0th order continuum, even when fitorder = 4. ''' datapath = os.environ.get('CASAPATH').split()[0] + '/data/regression/unittest' uvcdatadir = 'uvcontsub' # Pick up alternative data directory to run tests on MMSs testmms = False if os.environ.has_key('TEST_DATADIR'): testmms = True DATADIR = str(os.environ.get('TEST_DATADIR')) if os.path.isdir(DATADIR): datapath = DATADIR if os.environ.has_key('BYPASS_PARALLEL_PROCESSING'): ParallelTaskHelper.bypassParallelProcessing(1) #Commented out for refactoring (eliminated test_split dependence) #class UVContChecker(SplitChecker): # """ # Base class for uvcontsub unit testing. # """ # need_to_initialize = True # records = {} # # def do_split(self, corrsel): # """ # This is only called do_split because it comes from SplitChecker. # run_task (uvcontsub in this case) would have been a better name. # """ # record = {}
def concat(vislist, concatvis, freqtol, dirtol, respectname, timesort,
           copypointing, visweightscale):
    """concatenate visibility datasets

    The list of data sets given in the vis argument are chronologically
    concatenated into an output data set in concatvis, i.e. the data sets
    in vis are first ordered by the time of their earliest integration
    and then concatenated.

    If there are fields whose direction agrees within the direction
    tolerance (parameter dirtol), the actual direction in the resulting,
    merged output field will be the one from the chronologically first
    input MS.

    If concatvis already exists (e.g., it is the same as the first input
    data set), then the other input data sets will be appended to the
    concatvis data set.  There is no limit to the number of input data
    sets.

    If none of the input data sets have any scratch columns (model and
    corrected columns), none are created in the concatvis.  Otherwise
    these columns are created on output and initialized to their default
    value (1 in model column, data in corrected column) for those data
    with no input columns.

    Spectral windows for each data set with the same channelization, and
    within a specified frequency tolerance of another data set will be
    combined into one spectral window.

    A field position in one data set that is within a specified direction
    tolerance of another field position in any other data set will be
    combined into one field.  The field names need not be the same---only
    their position is used.

    Each appended dataset is assigned a new observation id (provided the
    entries in the observation table are indeed different).

    Keyword arguments:
    vis -- Name of input visibility files to be combined
           default: none; example: vis = ['src2.ms','ngc5921.ms','ngc315.ms']
    concatvis -- Name of visibility file that will contain the
           concatenated data.
           note: if this file exists on disk then the input files are
           added to this file.  Otherwise the new file contains the
           concatenated data.  Be careful here when concatenating to an
           existing file.
           default: none; example: concatvis='src2.ms'
           example: concatvis='outvis.ms'
           other examples:
              concat(vis=['src2.ms','ngc5921.ms'], concatvis='src2.ms')
                will concatenate 'ngc5921.ms' into 'src2.ms', and the
                original src2.ms is lost
              concat(vis=['src2.ms','ngc5921.ms'], concatvis='out.ms')
                will concatenate 'ngc5921.ms' and 'src2.ms' into a file
                named 'out.ms'; the original 'ngc5921.ms' and 'src2.ms'
                are untouched.
              concat(vis=['v1.ms','v2.ms'], concatvis='vall.ms')
                then concat(vis=['v3.ms','v4.ms'], concatvis='vall.ms')
                vall.ms will contain v1.ms+v2.ms+v3.ms+v4.ms
           Note: run flagmanager to save flags in the concatvis
    freqtol -- Frequency shift tolerance for considering data to be in
           the same spwid.  The number of channels must also be the same.
           default: '' == 1 Hz
           example: freqtol='10MHz' will not combine spwid unless they
           are within 10 MHz.
           Note: This option is useful to combine spectral windows with
           very slight frequency differences caused by Doppler tracking,
           for example.
    dirtol -- Direction shift tolerance for considering data as the same
           field.
           default: '' == 1 mas (milliarcsec)
           example: dirtol='1.arcsec' will not combine data for a field
           unless their phase center differ by less than 1 arcsec.  If
           the field names are different in the input data sets, the
           name in the output data set will be the first relevant data
           set in the list.
    respectname -- If true, fields with a different name are not merged
           even if their direction agrees (within dirtol).
           default: False
    timesort -- If true, the output visibility table will be sorted in
           time.
           default: false.  Data in order as read in.
           example: timesort=true
           Note: There is no constraint on data that is simultaneously
           observed for more than one field; for example multi-source
           correlation of VLBA data.
    copypointing -- Make a proper copy of the POINTING subtable (can be
           time consuming).  If False, the result is an empty POINTING
           table.
           default: True
    visweightscale -- The weights of the individual MSs will be scaled
           in the concatenated output MS by the factors in this list.
           Useful for handling heterogeneous arrays.  Use plotms to
           inspect the "Wt" column as a reference for determining the
           scaling factors.  See the cookbook for more details.
           example: [1.,3.,3.] - scale the weights of the second and
           third MS by a factor 3.
           default: [] (empty list) - no scaling

    Returns True on success; on any error, prints the exception and
    re-raises it.
    """
    ###
    #Python script
    try:
        casalog.origin('concat')
        # Local table and ms tool instances used throughout.
        t = tbtool()
        m = mstool()

        #break the reference between vis and vislist as we modify vis
        if (type(vislist) == str):
            vis = [vislist]
        else:
            vis = list(vislist)
        # ditto for concatvis
        theconcatvis = concatvis

        # warn if there are MMSs (multi-MSs): rows cannot be appended to
        # the first input if it is an MMS.
        mmslist = []
        for elvis in vis:  ###Oh no Elvis does not exist Mr Bill
            if (ParallelTaskHelper.isParallelMS(elvis)):
                mmslist.append(elvis)
        if len(mmslist) > 0:
            if (vis[0] == mmslist[0]):
                casalog.post('*** The first input MS is a multi-MS to which no row can be added. Cannot proceed.', 'WARN')
                casalog.post('*** Please use virtualconcat or convert the first input MS to a normal MS using split.', 'WARN')
                raise Exception, 'Cannot append to a multi-MS. Please use virtualconcat.'
            casalog.post('*** The following input measurement sets are multi-MSs', 'INFO')
            for mname in mmslist:
                casalog.post('*** ' + mname, 'INFO')
            casalog.post('*** Use virtualconcat to produce a single multi-MS from several multi-MSs.', 'INFO')

        # Decide whether any weight scaling is actually required (all
        # factors equal to 1. means no-op).
        doweightscale = False
        if (len(visweightscale) > 0):
            if (len(visweightscale) != len(vis)):
                raise Exception, 'parameter visweightscale must have same number of elements as parameter vis'
            for factor in visweightscale:
                if factor < 0.:
                    raise Exception, 'parameter visweightscale must only contain positive numbers'
                elif factor != 1.:
                    doweightscale = True

        # process the input MSs in chronological order:
        # read the earliest TIME of each MS and sort the names by it,
        # carrying the per-MS weight factor along in a 3-tuple.
        sortedvis = []
        sortedvisweightscale = []
        namestuples = []
        for name in vis:
            t.open(name)
            times = t.getcol('TIME')
            t.close()
            times.sort()
            if doweightscale:
                namestuples.append((times[0], name, visweightscale[vis.index(name)]))
            else:
                namestuples.append((times[0], name, 0))

        sorted_namestuples = sorted(namestuples, key=lambda msname: msname[0])

        for i in range(0, len(vis)):
            sortedvis.append(sorted_namestuples[i][1])
            sortedvisweightscale.append(sorted_namestuples[i][2])

        if ((type(concatvis) != str) or (len(concatvis.split()) < 1)):
            raise Exception, 'parameter concatvis is invalid'

        # If concatvis is one of the inputs, it must be chronologically
        # first; remove it from the sorted work list (it is the append
        # target, not an input to copy).
        existingconcatvis = False
        if (vis.count(concatvis) > 0):
            existingconcatvis = True
            cvisindex = sortedvis.index(concatvis)
            if not sorted_namestuples[cvisindex][0] == sorted_namestuples[0][0]:
                raise Exception, 'If concatvis is set to the name of an existing MS in vis, it must be the chronologically first.'+\
                    '\n I.e. in this case you should set concatvis to '+sortedvis[0]
            sortedvis.pop(cvisindex)
            if doweightscale:
                vwscale = sortedvisweightscale[cvisindex]
                sortedvisweightscale.pop(cvisindex)
                sortedvisweightscale = [vwscale] + sortedvisweightscale  # move the corresponding weight to the front

        if not vis == sortedvis:
            casalog.post('The list of input MSs is not in chronological order and will need to be sorted.', 'INFO')
            casalog.post('The chronological order in which the concatenation will take place is:', 'INFO')
            if existingconcatvis:
                casalog.post(' MJD ' + str(qa.splitdate(qa.quantity(sorted_namestuples[0][0], 's'))['mjd']) + ': ' + concatvis, 'INFO')
            for name in sortedvis:
                casalog.post(' MJD ' + str(qa.splitdate(qa.quantity(sorted_namestuples[sortedvis.index(name)][0], 's'))['mjd']) + ': ' + name, 'INFO')
            if doweightscale:
                casalog.post('In this new order, the weights are:' + str(sortedvisweightscale), 'INFO')

        # replace the original vis and visweightscale by the sorted ones (with concatvis removed if it exists)
        vis = sortedvis
        visweightscale = sortedvisweightscale

        if (os.path.exists(concatvis)):
            casalog.post('Will be concatenating into the existing ms ' + concatvis, 'WARN')
            if doweightscale and not existingconcatvis:
                visweightscale = [1.] + visweightscale  # set the weight for this existing MS to 1.
                casalog.post('The weights for this existing MS will be left unchanged.', 'WARN')
        else:
            if (len(vis) > 0):  # (note: in case len is 1, we only copy, essentially)
                # Seed the output with a copy of the first input MS.
                casalog.post('copying ' + vis[0] + ' to ' + theconcatvis, 'INFO')
                shutil.copytree(vis[0], theconcatvis)
                # note that the resulting copy is writable even if the original was read-only
                vis.pop(0)
                # don't need to pop visweightscale here!

        if not copypointing:  # remove the rows from the POINTING table of the first MS
            casalog.post('*** copypointing==False: resulting MS will have empty POINTING table.', 'INFO')
            # Use a timestamped temp name to avoid clobbering anything.
            tmptabname = 'TMPPOINTING' + str(time.time())
            shutil.rmtree(tmptabname, ignore_errors=True)
            shutil.move(theconcatvis + '/POINTING', tmptabname)
            t.open(tmptabname)
            if (t.nrows() > 0):
                # norows=True: copy the table structure only, no rows.
                ttab = t.copy(newtablename=theconcatvis + '/POINTING', deep=False, valuecopy=True, norows=True)
                ttab.close()
                t.close()
                shutil.rmtree(tmptabname, ignore_errors=True)
            else:  # the POINTING table is already empty
                casalog.post('*** Input POINTING table was already empty.', 'INFO')
                # NOTE(review): the table is moved back while still open
                # and only closed afterwards — confirm this is safe with
                # the table tool's locking behavior.
                shutil.move(tmptabname, theconcatvis + '/POINTING')
                t.close()

        # Determine if scratch columns should be considered at all
        # by checking if any of the MSs has them.
        considerscrcols = False
        considercorr = False
        considermodel = False
        needscrcols = []  # per MS: True if any scratch column is missing
        needmodel = []    # per MS: True if MODEL_DATA is missing
        needcorr = []     # per MS: True if CORRECTED_DATA is missing
        if ((type(theconcatvis) == str) and (os.path.exists(theconcatvis))):
            # check if all scratch columns are present
            t.open(theconcatvis)
            if (t.colnames().count('MODEL_DATA') == 1):
                considermodel = True
            if (t.colnames().count('CORRECTED_DATA') == 1):
                considercorr = True
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0 or t.colnames().count('MODEL_DATA') == 0)
            needmodel.append(t.colnames().count('MODEL_DATA') == 0)
            needcorr.append(t.colnames().count('CORRECTED_DATA') == 0)
            t.close()
        else:
            raise Exception, 'Visibility data set ' + theconcatvis + ' not found - please verify the name'

        for elvis in vis:  ###Oh no Elvis does not exist Mr Bill
            if (not os.path.exists(elvis)):
                raise Exception, 'Visibility data set ' + elvis + ' not found - please verify the name'
            # check if all scratch columns are present
            t.open(elvis)
            if (t.colnames().count('MODEL_DATA') == 1):
                considermodel = True
            if (t.colnames().count('CORRECTED_DATA') == 1):
                considercorr = True
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0 or t.colnames().count('MODEL_DATA') == 0)
            needmodel.append(t.colnames().count('MODEL_DATA') == 0)
            needcorr.append(t.colnames().count('CORRECTED_DATA') == 0)
            t.close()

        considerscrcols = (considercorr or considermodel)  # there are scratch columns

        # start actual work, file existence has already been checked
        # i == 0 refers to theconcatvis; the loop below advances i over
        # the remaining input MSs.
        i = 0
        if (considerscrcols and needscrcols[i]):
            # create scratch cols
            casalog.post('creating scratch columns in ' + theconcatvis, 'INFO')
            cb.open(theconcatvis, addcorr=(considercorr and needcorr[i]), addmodel=(considermodel and needmodel[i]))  # calibrator-open creates scratch columns
            cb.close()

        # scale the weights and sigma of the first MS in the chain
        if doweightscale:
            wscale = visweightscale[i]
            if (wscale == 1.):
                casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
            else:
                casalog.post('Scaling weights for first MS by factor ' + str(wscale), 'INFO')
                t.open(theconcatvis, nomodify=False)
                for colname in ['WEIGHT', 'WEIGHT_SPECTRUM']:
                    if (colname in t.colnames()) and (t.iscelldefined(colname, 0)):
                        for j in xrange(0, t.nrows()):
                            a = t.getcell(colname, j)
                            a *= wscale
                            t.putcell(colname, j, a)
                for colname in ['SIGMA']:
                    # SIGMA scales as 1/sqrt(weight factor)
                    if (wscale > 0. and colname in t.colnames()) and (t.iscelldefined(colname, 0)):
                        sscale = 1. / sqrt(wscale)
                        for j in xrange(0, t.nrows()):
                            a = t.getcell(colname, j)
                            a *= sscale
                            t.putcell(colname, j, a)
                t.close()

        # determine handling switch value (passed to ms.concatenate;
        # 2 is used when the POINTING table is to be dropped)
        handlingswitch = 0
        if not copypointing:
            handlingswitch = 2

        m.open(theconcatvis, nomodify=False)
        # NOTE(review): mmsmembers is assigned but not used further in
        # this function.
        mmsmembers = [theconcatvis]

        for elvis in vis:
            i = i + 1
            destms = ""
            casalog.post('concatenating ' + elvis + ' into ' + theconcatvis, 'INFO')

            wscale = 1.
            if doweightscale:
                wscale = visweightscale[i]
                if (wscale == 1.):
                    casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
                else:
                    casalog.post('Will scale weights for this MS by factor ' + str(wscale), 'INFO')

            if (considerscrcols and needscrcols[i]):
                if (ParallelTaskHelper.isParallelMS(elvis)):
                    raise Exception, 'Cannot create scratch columns in a multi-MS. Use virtualconcat.'
                else:
                    # create scratch cols
                    # Work on a temporary copy so the original MS stays
                    # untouched.
                    casalog.post('creating scratch columns for ' + elvis + ' (original MS unchanged)', 'INFO')
                    tempname = elvis + '_with_scrcols'
                    shutil.rmtree(tempname, ignore_errors=True)
                    shutil.copytree(elvis, tempname)
                    cb.open(tempname, addcorr=(considercorr and needcorr[i]), addmodel=(considermodel and needmodel[i]))  # calibrator-open creates scratch columns
                    cb.close()
                    # concatenate copy instead of original file
                    m.concatenate(msfile=tempname, freqtol=freqtol, dirtol=dirtol, respectname=respectname,
                                  weightscale=wscale, handling=handlingswitch, destmsfile=destms)
                    shutil.rmtree(tempname, ignore_errors=True)
            else:
                m.concatenate(msfile=elvis, freqtol=freqtol, dirtol=dirtol, respectname=respectname,
                              weightscale=wscale, handling=handlingswitch, destmsfile=destms)

        if timesort:
            casalog.post('Sorting main table by TIME ...', 'INFO')
            m.timesort()

        # Record the task invocation in the MS HISTORY table.
        m.writehistory(message='taskname=concat', origin='concat')
        m.writehistory(message='vis = "' + str(vis) + '"', origin='concat')
        m.writehistory(message='concatvis = "' + str(concatvis) + '"', origin='concat')
        m.writehistory(message='freqtol = "' + str(freqtol) + '"', origin='concat')
        m.writehistory(message='dirtol = "' + str(dirtol) + '"', origin='concat')
        m.writehistory(message='respectname = "' + str(respectname) + '"', origin='concat')
        m.writehistory(message='copypointing = "' + str(copypointing) + '"', origin='concat')
        m.writehistory(message='visweightscale = "' + str(visweightscale) + '"', origin='concat')
        m.close()

        return True

    except Exception, instance:
        print '*** Error ***', instance
        raise Exception, instance
def virtualconcat(vislist, concatvis, freqtol, dirtol, respectname,
                  visweightscale, keepcopy, copypointing):
    """Concatenate visibility data sets creating a multi-MS.

    Combine the input datasets into a multi-MS.

    NOTE: The input datasets are moved into the multi-MS and may be
    modified to account for subtable reference changes.

    If none of the input MSs have any scratch columns, none are created.
    Otherwise scratch columns are created and initialized in those MSs
    which don't have a complete set.

    Keyword arguments:
    vis -- Name of input visibility files (MS)
           default: none; example: vis=['ngc5921-1.ms', 'ngc5921-2.ms']
    concatvis -- Name of the output visibility file
           default: none; example: concatvis='src2.ms'
    freqtol -- Frequency shift tolerance for considering data as the
           same spwid.
           default: '' means always combine
           example: freqtol='10MHz' will not combine spwid unless they
           are within 10 MHz
    dirtol -- Direction shift tolerance for considering data as the
           same field.
           default: '' means always combine
           example: dirtol='1.arcsec' will not combine data for a field
           unless their phase center is less than 1 arcsec.
    respectname -- If true, fields with a different name are not merged
           even if their direction agrees (within dirtol).
           default: True
    visweightscale -- list of the weight scales to be applied to the
           individual MSs.
           default: [] (don't modify weights, equivalent to setting
           scale to 1 for each MS)
    keepcopy -- If true, a copy of the input MSs is kept in their
           original place.
           default: false
    copypointing -- If true, the POINTING table information will be
           present in the output. If false, the result is an empty
           POINTING table.
           default: True

    On error, prints the exception, restores the original MSs when
    keepcopy was requested, and re-raises.
    """
    ###
    #Python script

    # tempdir holds the keepcopy backups; originalvis is kept so the
    # except clause can restore from tempdir on failure.
    tempdir = ''
    originalvis = vislist
    try:
        casalog.origin('virtualconcat')
        # Local table and ms tool instances used throughout.
        t = tbtool()
        m = mstool()

        #break the reference between vis and vislist as we modify vis
        if (type(vislist) == str):
            vis = [vislist]
        else:
            vis = list(vislist)
        # ditto for concatvis
        theconcatvis = concatvis

        # Decide whether any weight scaling is actually required (all
        # factors equal to 1. means no-op).
        doweightscale = False
        if (len(visweightscale) > 0):
            if (len(visweightscale) != len(vis)):
                raise Exception, 'parameter visweightscale must have same number of elements as parameter vis'
            for factor in visweightscale:
                if factor < 0.:
                    raise Exception, 'parameter visweightscale must only contain positive numbers'
                elif factor != 1.:
                    doweightscale = True

        if ((type(concatvis) != str) or (len(concatvis.split()) < 1)):
            raise Exception, 'Parameter concatvis is invalid.'

        if (vis.count(concatvis) > 0):
            raise Exception, 'Parameter concatvis must not be equal to one of the members of parameter vis.'

        if (os.path.exists(concatvis)):
            raise Exception, 'The output MMS must not yet exist.'

        # process the input MSs in chronological order:
        # read the earliest TIME of each MS and sort the names by it,
        # carrying the per-MS weight factor along in a 3-tuple.
        sortedvis = []
        sortedvisweightscale = []
        namestuples = []
        for name in vis:
            t.open(name)
            times = t.getcol('TIME')
            t.close()
            times.sort()
            if doweightscale:
                namestuples.append((times[0], name, visweightscale[vis.index(name)]))
            else:
                namestuples.append((times[0], name, 0))

        sorted_namestuples = sorted(namestuples, key=lambda msname: msname[0])

        for i in range(0, len(vis)):
            sortedvis.append(sorted_namestuples[i][1])
            sortedvisweightscale.append(sorted_namestuples[i][2])

        if not vis == sortedvis:
            casalog.post('The list of input MSs is not in chronological order and will need to be sorted.', 'INFO')
            casalog.post('The chronological order in which the concatenation will take place is:', 'INFO')
            for name in sortedvis:
                casalog.post(' MJD ' + str(qa.splitdate(qa.quantity(sorted_namestuples[sortedvis.index(name)][0], 's'))['mjd']) + ': ' + name, 'INFO')
            if doweightscale:
                casalog.post('In this new order, the weights are:' + str(sortedvisweightscale), 'INFO')

        # replace the original vis and visweightscale by the sorted ones (with concatvis removed if it exists)
        vis = sortedvis
        visweightscale = sortedvisweightscale

        # if there are MMSs among the input, make their constituents the new input
        mmslist = []
        ismaster = []  # per entry: True for the subMS holding the master copy of the subtables
        for elvis in vis:
            ismaster.append(True)  # may be revised later
            if (ParallelTaskHelper.isParallelMS(elvis)):
                mmslist.append(elvis)

        if len(mmslist) > 0:
            casalog.post('*** The following input measurement sets are multi-MSs', 'INFO')
            for mname in mmslist:
                casalog.post('*** ' + mname, 'INFO')
            oldvis = vis
            oldvisweightscale = visweightscale
            vis = []
            visweightscale = []
            ismaster = []  # reset ismaster
            i = 0
            for elvis in oldvis:
                if elvis in mmslist:  # append the subMSs individually
                    m.open(elvis)
                    mses = m.getreferencedtables()
                    m.close()
                    mses.sort()
                    # The master subMS is the one the MMS's ANTENNA
                    # subtable link resolves into.
                    mastername = os.path.basename(os.path.dirname(os.path.realpath(elvis + '/ANTENNA')))
                    for mname in mses:
                        #print 'subms: ', mname
                        vis.append(mname)
                        if doweightscale:
                            # all subMSs of an MMS inherit its weight factor
                            visweightscale.append(oldvisweightscale[i])
                        if os.path.basename(mname) == mastername:
                            ismaster.append(True)
                        else:
                            ismaster.append(False)
                else:
                    vis.append(elvis)
                    if doweightscale:
                        visweightscale.append(oldvisweightscale[i])
                    ismaster.append(True)
                i += 1

        if keepcopy:
            # Move the originals aside and work on copies, so the inputs
            # can be restored afterwards (or on error).
            casalog.post('*** keepcopy==True: creating copy of input MSs to keep ...', 'INFO')
            tempdir = 'concat_tmp_' + str(time.time())
            os.mkdir(tempdir)
            for elvis in originalvis:
                shutil.move(elvis, tempdir)
                # keep timestamps and permissions
                shutil.copytree(tempdir + '/' + elvis, elvis, True)  # symlinks=True

        casalog.post('Concatenating ...', 'INFO')

        if not copypointing:  # delete the rows of all pointing tables
            casalog.post('*** copypointing==False: resulting MMS will have empty POINTING table.', 'INFO')
            tmptabname = 'TMPPOINTING' + str(time.time())
            tmptabname2 = 'TMPPOINTING2' + str(time.time())
            shutil.rmtree(tmptabname, ignore_errors=True)
            shutil.rmtree(tmptabname2, ignore_errors=True)
            shutil.move(vis[0] + '/POINTING', tmptabname)
            t.open(tmptabname)
            if (t.nrows() > 0):
                # norows=True: copy the table structure only, no rows.
                ttab = t.copy(newtablename=tmptabname2, deep=False, valuecopy=True, norows=True)
                ttab.close()
                t.close()
                shutil.rmtree(tmptabname, ignore_errors=True)
            else:  # the POINTING table is already empty
                t.close()
                casalog.post('*** Input POINTING table was already empty.', 'INFO')
                shutil.move(tmptabname, tmptabname2)

            for i in range(len(vis)):  # replace the POINTING tables by the empty one
                os.system('rm -rf ' + vis[i] + '/POINTING')
                shutil.copytree(tmptabname2, vis[i] + '/POINTING')

            shutil.rmtree(tmptabname2, ignore_errors=True)

        if (len(vis) > 0):  # (note: in case len is 1, we only copy, essentially)
            # The first (chronologically earliest) MS becomes the base
            # of the MMS; the remaining ones are appended to it below.
            theconcatvis = vis[0]
            if (len(vis) == 1):
                shutil.copytree(vis[0], concatvis, True)
            vis.remove(vis[0])

        # Determine if scratch columns should be considered at all
        # by checking if any of the MSs has them.
        considerscrcols = False
        needscrcols = []  # per MS: True if any scratch column is missing
        if ((type(theconcatvis) == str) and (os.path.exists(theconcatvis))):
            # check if all scratch columns are present
            t.open(theconcatvis)
            if (t.colnames().count('CORRECTED_DATA') == 1 or t.colnames().count('MODEL_DATA') == 1):
                considerscrcols = True  # there are scratch columns
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0 or t.colnames().count('MODEL_DATA') == 0)
            t.close()
        else:
            raise Exception, 'Visibility data set ' + theconcatvis + ' not found - please verify the name'

        for elvis in vis:  ###Oh no Elvis does not exist Mr Bill
            if (not os.path.exists(elvis)):
                raise Exception, 'Visibility data set ' + elvis + ' not found - please verify the name'
            # check if all scratch columns are present
            t.open(elvis)
            if (t.colnames().count('CORRECTED_DATA') == 1 or t.colnames().count('MODEL_DATA') == 1):
                considerscrcols = True  # there are scratch columns
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0 or t.colnames().count('MODEL_DATA') == 0)
            t.close()

        # start actual work, file existence has already been checked
        if (considerscrcols and needscrcols[0]):
            # create scratch cols
            casalog.post('creating scratch columns in ' + theconcatvis, 'INFO')
            cb.open(theconcatvis)  # calibrator-open creates scratch columns
            cb.close()

        # scale the weights of the first MS in the chain
        if doweightscale:
            wscale = visweightscale[0]
            if (wscale == 1.):
                casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
            else:
                casalog.post('Scaling weights for first MS by factor ' + str(wscale), 'INFO')
                t.open(theconcatvis, nomodify=False)
                for colname in ['WEIGHT', 'WEIGHT_SPECTRUM']:
                    if (colname in t.colnames()) and (t.iscelldefined(colname, 0)):
                        for j in xrange(0, t.nrows()):
                            a = t.getcell(colname, j)
                            a *= wscale
                            t.putcell(colname, j, a)
                t.close()

        m.open(theconcatvis, nomodify=False)
        mmsmembers = [theconcatvis]

        # Auxiliary file used by virtconcatenate; removed after the loop.
        auxfile = 'concat_aux_' + str(time.time())

        i = 0
        for elvis in vis:
            i = i + 1
            mmsmembers.append(elvis)
            casalog.post('adding ' + elvis + ' to multi-MS ' + concatvis, 'INFO')

            wscale = 1.
            if doweightscale:
                wscale = visweightscale[i]
                if (wscale == 1.):
                    casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
                else:
                    casalog.post('Will scale weights for this MS by factor ' + str(wscale), 'INFO')

            if (considerscrcols and needscrcols[i]):
                # create scratch cols
                casalog.post('creating scratch columns for ' + elvis, 'INFO')
                cb.open(elvis)  # calibrator-open creates scratch columns
                cb.close()

            m.virtconcatenate(msfile=elvis,
                              auxfilename=auxfile,
                              freqtol=freqtol, dirtol=dirtol, respectname=respectname,
                              weightscale=wscale)
        #end for
        os.remove(auxfile)

        # Record the task invocation in the MS HISTORY table.
        m.writehistory(message='taskname=virtualconcat', origin='virtualconcat')
        m.writehistory(message='vis = "' + str(vis) + '"', origin='virtualconcat')
        m.writehistory(message='concatvis = "' + str(concatvis) + '"', origin='virtualconcat')
        m.writehistory(message='freqtol = "' + str(freqtol) + '"', origin='virtualconcat')
        m.writehistory(message='dirtol = "' + str(dirtol) + '"', origin='virtualconcat')
        m.writehistory(message='respectname = "' + str(respectname) + '"', origin='virtualconcat')
        m.writehistory(message='visweightscale = "' + str(visweightscale) + '"', origin='virtualconcat')
        m.close()

        # concatenate the POINTING tables:
        # the master subMSs' POINTING tables are renamed and linked into
        # one multi-table under the first member.
        masterptable = mmsmembers[0] + '/POINTING'
        ptablemembers = []
        if os.path.exists(masterptable) and copypointing:
            casalog.post('Concatenating the POINTING tables ...', 'INFO')
            i = 0
            for i in xrange(len(mmsmembers)):
                ptable = mmsmembers[i] + '/POINTING'
                if ismaster[i] and os.path.exists(ptable):
                    casalog.post(' ' + ptable, 'INFO')
                    shutil.move(ptable, ptable + str(i))
                    ptablemembers.append(ptable + str(i))
            #end for
            t.createmultitable(masterptable, ptablemembers, 'SUBTBS')
        # endif

        ph.makeMMS(concatvis, mmsmembers,
                   True,  # copy subtables from first to all other members
                   ['POINTING', 'SYSCAL', 'SYSPOWER'])  # excluding tables which will be linked

        # remove the remaining "hulls" of the emptied input MMSs (if there are any)
        for elvis in mmslist:
            shutil.rmtree(elvis)

        if keepcopy:
            # Put the preserved originals back in place.
            for elvis in originalvis:
                shutil.move(tempdir + '/' + elvis, elvis)
            os.rmdir(tempdir)

    except Exception, instance:
        print '*** Error ***', instance
        if keepcopy and tempdir != '':
            # Best-effort restore of the inputs that were moved aside.
            print "Restoring original MSs ..."
            for elvis in originalvis:
                if os.path.exists(tempdir + '/' + elvis):
                    shutil.rmtree(elvis)
                    shutil.move(tempdir + '/' + elvis, elvis)
            os.rmdir(tempdir)
        raise Exception, instance
def virtualconcat(vislist, concatvis, freqtol, dirtol, respectname,
                  visweightscale, keepcopy, copypointing):
    """
    Concatenate visibility data sets creating a Multi-MS.

    Combine the input datasets into a Multi-MS.
    NOTE: The input datasets are moved into the Multi-MS and may be modified
    to account for subtable reference changes.
    If none of the input MSs have any scratch columns, none are created.
    Otherwise scratch columns are created and initialized in those MSs
    which don't have a complete set.

    Keyword arguments:
    vis -- Name of input visibility files (MS)
            default: none; example: vis=['ngc5921-1.ms', 'ngc5921-2.ms']
    concatvis -- Name of the output visibility file
            default: none; example: concatvis='src2.ms'
    freqtol -- Frequency shift tolerance for considering data as the same spwid
            default: ''  means always combine
            example: freqtol='10MHz' will not combine spwid unless they are
            within 10 MHz
    dirtol -- Direction shift tolerance for considering data as the same field
            default: '' means always combine
            example: dirtol='1.arcsec' will not combine data for a field unless
            their phase center is less than 1 arcsec.
    respectname -- If true, fields with a different name are not merged even if their
            direction agrees (within dirtol)
            default: True
    visweightscale -- list of the weight scales to be applied to the individual MSs
            default: [] (don't modify weights, equivalent to setting scale to 1 for each MS)
    keepcopy -- If true, a copy of the input MSs is kept in their original place.
            default: false
    copypointing --  If true, the POINTING table information will be present in the output.
            If false, the result is an empty POINTING table.
            default: True
    """
    ###
    #Python script

    # tempdir != '' doubles as the flag that a keepcopy backup exists and
    # must be restored on error (see except clause below).
    tempdir = ''
    originalvis = vislist
    try:
        casalog.origin('virtualconcat')
        t = tbtool()
        m = mstool()

        #break the reference between vis and vislist as we modify vis
        if (type(vislist) == str):
            vis = [vislist]
        else:
            vis = list(vislist)
        #dto. for concatvis
        theconcatvis = concatvis

        # Decide whether any weight scaling is actually needed (all factors
        # equal to 1. means no-op) and validate the visweightscale list.
        doweightscale = False
        if (len(visweightscale) > 0):
            if (len(visweightscale) != len(vis)):
                raise Exception, 'parameter visweightscale must have same number of elements as parameter vis'
            for factor in visweightscale:
                if factor < 0.:
                    raise Exception, 'parameter visweightscale must only contain positive numbers'
                elif factor != 1.:
                    doweightscale = True

        # Basic parameter validation for the output name.
        if ((type(concatvis) != str) or (len(concatvis.split()) < 1)):
            raise Exception, 'Parameter concatvis is invalid.'
        if (vis.count(concatvis) > 0):
            raise Exception, 'Parameter concatvis must not be equal to one of the members of parameter vis.'
        if (os.path.exists(concatvis)):
            raise Exception, 'The output MMS must not yet exist.'

        # process the input MSs in chronological order
        # (sort key is the earliest TIME value found in each MS main table)
        sortedvis = []
        sortedvisweightscale = []
        namestuples = []
        for name in vis:
            t.open(name)
            times = t.getcol('TIME')
            t.close()
            times.sort()
            if doweightscale:
                namestuples.append((times[0], name, visweightscale[vis.index(name)]))
            else:
                # weight scale slot unused; 0 is a placeholder
                namestuples.append((times[0], name, 0))

        sorted_namestuples = sorted(namestuples, key=lambda msname: msname[0])

        for i in range(0, len(vis)):
            sortedvis.append(sorted_namestuples[i][1])
            sortedvisweightscale.append(sorted_namestuples[i][2])

        if not vis == sortedvis:
            casalog.post('The list of input MSs is not in chronological order and will need to be sorted.', 'INFO')
            casalog.post('The chronological order in which the concatenation will take place is:', 'INFO')
            for name in sortedvis:
                casalog.post(' MJD ' + str(qa.splitdate(qa.quantity(sorted_namestuples[sortedvis.index(name)][0], 's'))['mjd']) + ': ' + name, 'INFO')
            if doweightscale:
                casalog.post('In this new order, the weights are:' + str(sortedvisweightscale), 'INFO')

        # replace the original vis and visweightscale by the sorted ones (with concatvis removed if it exists)
        vis = sortedvis
        visweightscale = sortedvisweightscale

        # if there are MMSs among the input, make their constituents the new input
        mmslist = []
        ismaster = []
        for elvis in vis:
            ismaster.append(True)  # may be revised later
            if (ParallelTaskHelper.isParallelMS(elvis)):
                mmslist.append(elvis)

        if len(mmslist) > 0:
            casalog.post('*** The following input measurement sets are multi-MSs', 'INFO')
            for mname in mmslist:
                casalog.post('*** ' + mname, 'INFO')
            oldvis = vis
            oldvisweightscale = visweightscale
            vis = []
            visweightscale = []
            ismaster = []  # reset ismaster
            i = 0
            for elvis in oldvis:
                if elvis in mmslist:  # append the subMSs individually
                    m.open(elvis)
                    mses = m.getreferencedtables()
                    m.close()
                    mses.sort()
                    # the subMS that physically holds the subtables is the
                    # "master"; identify it by resolving the ANTENNA link
                    mastername = os.path.basename(os.path.dirname(os.path.realpath(elvis + '/ANTENNA')))
                    for mname in mses:
                        #print 'subms: ', mname
                        vis.append(mname)
                        if doweightscale:
                            # all subMSs of one MMS inherit that MMS's scale
                            visweightscale.append(oldvisweightscale[i])
                        if os.path.basename(mname) == mastername:
                            ismaster.append(True)
                        else:
                            ismaster.append(False)
                else:
                    vis.append(elvis)
                    if doweightscale:
                        visweightscale.append(oldvisweightscale[i])
                    ismaster.append(True)
                i += 1

        if keepcopy:
            casalog.post('*** keepcopy==True: creating copy of input MSs to keep ...', 'INFO')
            tempdir = 'concat_tmp_' + str(time.time())
            os.mkdir(tempdir)
            for elvis in originalvis:
                shutil.move(elvis, tempdir)
                # keep timestamps and permissions
                shutil.copytree(tempdir + '/' + elvis, elvis, True)  # symlinks=True

        casalog.post('Concatenating ...', 'INFO')

        if not copypointing:  # delete the rows of all pointing tables
            casalog.post('*** copypointing==False: resulting MMS will have empty POINTING table.', 'INFO')
            tmptabname = 'TMPPOINTING' + str(time.time())
            tmptabname2 = 'TMPPOINTING2' + str(time.time())
            shutil.rmtree(tmptabname, ignore_errors=True)
            shutil.rmtree(tmptabname2, ignore_errors=True)
            shutil.move(vis[0] + '/POINTING', tmptabname)
            t.open(tmptabname)
            if (t.nrows() > 0):
                # make a structure-only (norows) copy to serve as the empty table
                ttab = t.copy(newtablename=tmptabname2, deep=False,
                              valuecopy=True, norows=True)
                ttab.close()
                t.close()
                shutil.rmtree(tmptabname, ignore_errors=True)
            else:  # the POINTING table is already empty
                t.close()
                casalog.post('*** Input POINTING table was already empty.', 'INFO')
                shutil.move(tmptabname, tmptabname2)
            for i in range(len(vis)):  # replace the POINTING tables by the empty one
                os.system('rm -rf ' + vis[i] + '/POINTING')
                shutil.copytree(tmptabname2, vis[i] + '/POINTING')
            shutil.rmtree(tmptabname2, ignore_errors=True)

        if (len(vis) > 0):  # (note: in case len is 1, we only copy, essentially)
            # the first (chronologically earliest) MS becomes the concat target
            theconcatvis = vis[0]
            if (len(vis) == 1):
                shutil.copytree(vis[0], concatvis, True)
            vis.remove(vis[0])

        # Determine if scratch columns should be considered at all
        # by checking if any of the MSs has them.
        considerscrcols = False
        needscrcols = []
        if ((type(theconcatvis) == str) and (os.path.exists(theconcatvis))):
            # check if all scratch columns are present
            t.open(theconcatvis)
            if (t.colnames().count('CORRECTED_DATA') == 1
                    or t.colnames().count('MODEL_DATA') == 1):
                considerscrcols = True  # there are scratch columns
            # needscrcols[k]: MS k has an incomplete set of scratch columns
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0
                               or t.colnames().count('MODEL_DATA') == 0)
            t.close()
        else:
            raise Exception, 'Visibility data set ' + theconcatvis + ' not found - please verify the name'

        for elvis in vis:  ###Oh no Elvis does not exist Mr Bill
            if (not os.path.exists(elvis)):
                raise Exception, 'Visibility data set ' + elvis + ' not found - please verify the name'
            # check if all scratch columns are present
            t.open(elvis)
            if (t.colnames().count('CORRECTED_DATA') == 1
                    or t.colnames().count('MODEL_DATA') == 1):
                considerscrcols = True  # there are scratch columns
            needscrcols.append(t.colnames().count('CORRECTED_DATA') == 0
                               or t.colnames().count('MODEL_DATA') == 0)
            t.close()

        # start actual work, file existence has already been checked
        if (considerscrcols and needscrcols[0]):
            # create scratch cols
            casalog.post('creating scratch columns in ' + theconcatvis, 'INFO')
            _cb.open(theconcatvis)  # calibrator-open creates scratch columns
            _cb.close()

        # scale the weights of the first MS in the chain
        if doweightscale:
            wscale = visweightscale[0]
            if (wscale == 1.):
                casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
            else:
                casalog.post('Scaling weights for first MS by factor ' + str(wscale), 'INFO')
                t.open(theconcatvis, nomodify=False)
                for colname in ['WEIGHT', 'WEIGHT_SPECTRUM']:
                    if (colname in t.colnames()) and (t.iscelldefined(colname, 0)):
                        # cell-by-cell scaling (WEIGHT_SPECTRUM cells may vary in shape)
                        for j in xrange(0, t.nrows()):
                            a = t.getcell(colname, j)
                            a *= wscale
                            t.putcell(colname, j, a)
                t.close()

        m.open(theconcatvis, nomodify=False)
        mmsmembers = [theconcatvis]

        auxfile = 'concat_aux_' + str(time.time())

        i = 0
        for elvis in vis:
            i = i + 1
            mmsmembers.append(elvis)
            casalog.post('adding ' + elvis + ' to multi-MS ' + concatvis, 'INFO')

            wscale = 1.
            if doweightscale:
                wscale = visweightscale[i]
                if (wscale == 1.):
                    casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
                else:
                    casalog.post('Will scale weights for this MS by factor ' + str(wscale), 'INFO')

            if (considerscrcols and needscrcols[i]):
                # create scratch cols
                casalog.post('creating scratch columns for ' + elvis, 'INFO')
                _cb.open(elvis)  # calibrator-open creates scratch columns
                _cb.close()

            # virtual concatenation: the member MS is adjusted in place,
            # weight scaling is applied by the tool itself
            m.virtconcatenate(msfile=elvis,
                              auxfilename=auxfile,
                              freqtol=freqtol, dirtol=dirtol,
                              respectname=respectname,
                              weightscale=wscale)
        #end for
        os.remove(auxfile)

        # record the task invocation in the MS HISTORY table
        m.writehistory(message='taskname=virtualconcat', origin='virtualconcat')
        m.writehistory(message='vis          = "' + str(vis) + '"', origin='virtualconcat')
        m.writehistory(message='concatvis    = "' + str(concatvis) + '"', origin='virtualconcat')
        m.writehistory(message='freqtol      = "' + str(freqtol) + '"', origin='virtualconcat')
        m.writehistory(message='dirtol       = "' + str(dirtol) + '"', origin='virtualconcat')
        m.writehistory(message='respectname  = "' + str(respectname) + '"', origin='virtualconcat')
        m.writehistory(message='visweightscale = "' + str(visweightscale) + '"', origin='virtualconcat')
        m.close()

        # concatenate the POINTING tables
        masterptable = mmsmembers[0] + '/POINTING'
        ptablemembers = []
        if os.path.exists(masterptable) and copypointing:
            casalog.post('Concatenating the POINTING tables ...', 'INFO')
            i = 0
            # each member's POINTING is renamed POINTING<i> and becomes a
            # subtable of the multitable created below
            for i in xrange(len(mmsmembers)):
                ptable = mmsmembers[i] + '/POINTING'
                if ismaster[i] and os.path.exists(ptable):
                    casalog.post(' ' + ptable, 'INFO')
                    shutil.move(ptable, ptable + str(i))
                    ptablemembers.append(ptable + str(i))
            #end for
            t.createmultitable(masterptable, ptablemembers, 'SUBTBS')
        # endif

        # Get all available subtables
        thesubtables = ph.getSubtables(mmsmembers[0])
        # Remove the SOURCE and HISTORY tables, which will be the only copied.
        # All other sub-tables will be linked to first subms
        thesubtables.remove('SOURCE')
        thesubtables.remove('HISTORY')
        subtabs_to_omit = thesubtables

        ph.makeMMS(concatvis, mmsmembers,
                   True,  # copy subtables from first to all other members
                   subtabs_to_omit)  # excluding tables which will be linked

        # remove the remaining "hulls" of the emptied input MMSs (if there are any)
        for elvis in mmslist:
            shutil.rmtree(elvis)

        if keepcopy:
            # put the preserved originals back in place
            for elvis in originalvis:
                shutil.move(tempdir + '/' + elvis, elvis)
            os.rmdir(tempdir)

    except Exception, instance:
        print '*** Error ***', instance
        if keepcopy and tempdir != '':
            # roll back: restore the saved originals before re-raising
            print "Restoring original MSs ..."
            for elvis in originalvis:
                if os.path.exists(tempdir + '/' + elvis):
                    shutil.rmtree(elvis)
                    shutil.move(tempdir + '/' + elvis, elvis)
            os.rmdir(tempdir)
        raise Exception, instance
def test_mpi4casa_flagdata_list_return_async(self):
    """Test flagdata summary in async mode"""

    # Reference run: temporarily force sequential (bypass) processing,
    # then restore whatever bypass setting was active before.
    prev_bypass = ParallelTaskHelper.getBypassParallelProcessing()
    ParallelTaskHelper.bypassParallelProcessing(2)
    expected = flagdata(vis=self.vis, mode='summary')
    ParallelTaskHelper.bypassParallelProcessing(prev_bypass)

    # Duplicate the input MMS so each async flagdata run has its own copy.
    os.system("cp -r %s %s" % (self.vis, self.vis2))
    os.system("cp -r %s %s" % (self.vis, self.vis3))

    # Launch the three summaries asynchronously via ParallelTaskHelper.
    ParallelTaskHelper.setAsyncMode(True)
    req_a = flagdata(vis=self.vis, mode='summary')
    req_b = flagdata(vis=self.vis2, mode='summary')
    req_c = flagdata(vis=self.vis3, mode='summary')

    # Block until every request in the combined list has been served.
    pending = req_a + req_b + req_c
    self.client.get_command_response(pending, True, True)

    # Gather the consolidated result of each async run.
    outcome_a = ParallelTaskHelper.getResult(req_a, 'flagdata')
    outcome_b = ParallelTaskHelper.getResult(req_b, 'flagdata')
    outcome_c = ParallelTaskHelper.getResult(req_c, 'flagdata')

    # Leave the helper in its default synchronous mode.
    ParallelTaskHelper.setAsyncMode(False)

    # Every async summary must match the sequential reference.
    self.assertEqual(outcome_a, expected,
                     "flagdata dictionary does not match for the first flagdata run")
    self.assertEqual(outcome_b, expected,
                     "flagdata dictionary does not match for the second flagdata run")
    self.assertEqual(outcome_c, expected,
                     "flagdata dictionary does not match for the third flagdata run")
imt = imtool() imt.open(vis, usescratch=False) imt.calcuvw(fldids, refcode='J2000', reuse=False) imt.close() imt = None if ((oldrefcol != []) and (thenewref > 0)): tbt.open(vis + '/FIELD', nomodify=False) tbt.putcol('PhaseDir_Ref', oldrefcol) tbt.close() else: casalog.post("UVW coordinates not changed.", 'NORMAL') if (ParallelTaskHelper.isParallelMS(vis)): casalog.post("Tidying up the MMS subtables ...", 'NORMAL') ParallelTaskHelper.restoreSubtableAgreement(vis) mst = None tbt = None return True except Exception, instance: mst = None tbt = None imt = None casalog.post("*** Error \'%s\' " % (instance), 'SEVERE') return False
class test_simplecluster(unittest.TestCase):
    """Unit tests for simple_cluster: cluster creation from a config file,
    resource monitoring output, and cluster-less (bypass) processing of MMSs."""

    # Fixture constants shared by all tests
    projectname="test_simplecluster"
    clusterfile="test_simplecluster_config.txt"
    monitorFile="monitoring.log"
    cluster=None

    # Get local host configuration parameters
    host=os.uname()[1]
    cwd=os.getcwd()
    ncpu=multiprocessing.cpu_count()

    # jagonzal (CAS-4287): Add a cluster-less mode to by-pass parallel processing for MMSs as requested
    if os.environ.has_key('BYPASS_SEQUENTIAL_PROCESSING'):
        ParallelTaskHelper.bypassParallelProcessing(1)
        bypassParallelProcessing = True
    else:
        bypassParallelProcessing = False

    # Names of the data files staged by setUpFile()
    vis = ""
    ref = ""
    aux = ""

    def stopCluster(self):
        # Stop thread services and cluster
        self.cluster.stop_cluster()
        # Remove log files, cluster files, and monitoring files
        self.cleanUp()

    def cleanUp(self):
        # Remove per-engine logs plus the cluster config and monitoring files
        logfiles=glob.glob("engine-*.log")
        for i in logfiles:
            os.remove(i)
        if os.path.exists(self.clusterfile):
            os.remove(self.clusterfile)
        if os.path.exists(self.monitorFile):
            os.remove(self.monitorFile)

    def initCluster(self,userMonitorFile="",max_engines=2,max_memory=0.,memory_per_engine=512.):
        # First of all clean up files from previous sessions
        self.cleanUp()
        # Create cluster object
        if (len(userMonitorFile) > 0):
            self.cluster = simple_cluster(userMonitorFile)
            self.monitorFile = userMonitorFile
        else:
            self.cluster = simple_cluster()
            self.monitorFile = "monitoring.log"
        # Create cluster configuration file
        self.createClusterFile(max_engines,max_memory,memory_per_engine)
        # Initialize cluster object
        if (self.cluster.init_cluster(self.clusterfile, self.projectname)):
            self.cluster.check_resource()
            # Wait unit cluster is producing monitoring info
            if (len(userMonitorFile) > 0):
                self.waitForFile(userMonitorFile, 20)
            else:
                self.waitForFile('monitoring.log', 20)

    def createClusterFile(self,max_engines=0.,max_memory=0.,memory_per_engine=512.):
        # Write a one-line cluster config: "host, engines, workdir, memory[,
        # memory_per_engine]".  For >1 engines total memory is derived as
        # 512 MB per engine; otherwise the per-engine memory is appended.
        if (max_engines>1):
            max_memory = 512*max_engines
            msg=self.host + ', ' + str(max_engines) + ', ' + self.cwd + ', ' + str(max_memory)
        else:
            msg=self.host + ', ' + str(max_engines) + ', ' + self.cwd + ', ' + str(max_memory) + ', ' + str(memory_per_engine)
        f=open(self.clusterfile, 'w')
        f.write(msg)
        f.close()
        # ensure the file is visible to the filesystem before proceeding
        self.waitForFile(self.clusterfile, 10)

    def waitForFile(self, file, seconds):
        # Poll (1 s period) until the file exists or the timeout expires.
        for i in range(0,seconds):
            if (os.path.isfile(file)):
                return
            time.sleep(1)

    def setUpFile(self,file,type_file):
        # Stage one file or a list of files, then remember the name under
        # self.vis / self.ref / self.aux according to type_file.
        if type(file) is list:
            for file_i in file:
                self.setUpFileCore(file_i,type_file)
        else:
            self.setUpFileCore(file,type_file)
        if type_file=='vis':
            self.vis = file
        elif type_file =='ref':
            self.ref = file
        elif type_file=='aux':
            self.aux = file

    def setUpFileCore(self,file,type_file):
        # Copy one regression data file from the CASA data repository into
        # the working directory, replacing any stale copy.
        if os.path.exists(file):
            print "%s file %s is already in the working area, deleting ..." % (type_file,file)
            os.system('rm -rf ' + file)
        print "Copy %s file %s into the working area..." % (type_file,file)
        os.system('cp -R ' + os.environ.get('CASAPATH').split()[0] +
                  '/data/regression/unittest/simplecluster/' + file + ' ' + file)

    def create_input(self,str_text, filename):
        """Save the string in a text file"""
        inp = filename
        cmd = str_text
        # remove file first
        if os.path.exists(inp):
            os.system('rm -f '+ inp)
        # save to a file
        fid = open(inp, 'w')
        fid.write(cmd)
        # close file
        fid.close()
        # wait until file is visible for the filesystem
        self.waitForFile(filename, 10)
        return

    def test1_defaultCluster(self):
        """Test 1: Create a default cluster"""
        # Create cluster file
        self.initCluster(max_engines=0.)
        cluster_list = self.cluster.get_hosts()
        self.assertTrue(cluster_list[0][0]==self.host)
        self.assertTrue(cluster_list[0][2]==self.cwd)
        self.stopCluster()

    def test2_availableResourcesCluster(self):
        """Test 2: Create a custom cluster to use all the available resources"""
        # Create cluster file
        max_memory_local = self.ncpu*1024
        self.initCluster(max_engines=1.,max_memory=max_memory_local,memory_per_engine=1024.)
        cluster_list = self.cluster.get_hosts()
        self.assertTrue(cluster_list[0][0]==self.host)
        self.assertTrue(cluster_list[0][1]==self.ncpu)
        self.assertTrue(cluster_list[0][2]==self.cwd)
        self.stopCluster()

    def test3_halfCPUCluster(self):
        """Test 3: Create a custom cluster to use half of available CPU capacity"""
        # Create cluster file
        max_memory_local = self.ncpu*512
        self.initCluster(max_engines=0.5,max_memory=max_memory_local,memory_per_engine=512.)
        cluster_list = self.cluster.get_hosts()
        self.assertTrue(cluster_list[0][0]==self.host)
        self.assertTrue(cluster_list[0][2]==self.cwd)
        self.stopCluster()

    # NOTE(review): this method reuses the "test3_" prefix of the previous
    # test; the names differ so both still run under unittest discovery.
    def test3_halfMemoryCluster(self):
        """Test 3: Create a custom cluster to use half of available RAM memory"""
        # Create cluster file
        self.initCluster(max_engines=self.ncpu,max_memory=0.5,memory_per_engine=128.)
        cluster_list = self.cluster.get_hosts()
        self.assertTrue(cluster_list[0][0]==self.host)
        self.assertTrue(cluster_list[0][2]==self.cwd)
        self.stopCluster()

    def test4_monitoringDefault(self):
        """Test 4: Check default monitoring file exists"""
        # Create cluster file
        self.initCluster()
        # The first line of the monitoring log is the column header
        fid = open('monitoring.log', 'r')
        line = fid.readline()
        self.assertTrue(line.find('Host')>=0)
        self.assertTrue(line.find('Engine')>=0)
        self.assertTrue(line.find('Status')>=0)
        self.assertTrue(line.find('CPU[%]')>=0)
        self.assertTrue(line.find('Memory[%]')>=0)
        self.assertTrue(line.find('Time[s]')>=0)
        self.assertTrue(line.find('Read[MB]')>=0)
        self.assertTrue(line.find('Write[MB]')>=0)
        self.assertTrue(line.find('Read[MB/s]')>=0)
        self.assertTrue(line.find('Write[MB/s]')>=0)
        self.assertTrue(line.find('Job')>=0)
        self.assertTrue(line.find('Sub-MS')>=0)
        self.stopCluster()

    def test5_monitoringUser(self):
        """Test 5: Check custom monitoring file exists"""
        # Create cluster file
        self.initCluster('userMonitorFile.log')
        fid = open('userMonitorFile.log', 'r')
        line = fid.readline()
        self.assertTrue(line.find('Host')>=0)
        self.assertTrue(line.find('Engine')>=0)
        self.assertTrue(line.find('Status')>=0)
        self.assertTrue(line.find('CPU[%]')>=0)
        self.assertTrue(line.find('Memory[%]')>=0)
        self.assertTrue(line.find('Time[s]')>=0)
        self.assertTrue(line.find('Read[MB]')>=0)
        self.assertTrue(line.find('Write[MB]')>=0)
        self.assertTrue(line.find('Read[MB/s]')>=0)
        self.assertTrue(line.find('Write[MB/s]')>=0)
        self.assertTrue(line.find('Job')>=0)
        self.assertTrue(line.find('Sub-MS')>=0)
        self.stopCluster()

    def test6_monitoringStandAlone(self):
        """Test 6: Check the dict structure of the stand-alone method """
        # Create cluster file
        self.initCluster('userMonitorFile.log')
        state = self.cluster.show_resource(True)
        cluster_list = self.cluster.get_hosts()
        # one state entry per engine on this host
        for engine in range(cluster_list[0][1]):
            self.assertTrue(state[self.host][engine].has_key('Status'))
            self.assertTrue(state[self.host][engine].has_key('Sub-MS'))
            self.assertTrue(state[self.host][engine].has_key('Read'))
            self.assertTrue(state[self.host][engine].has_key('Write'))
            self.assertTrue(state[self.host][engine].has_key('Job'))
            self.assertTrue(state[self.host][engine].has_key('Memory'))
            self.assertTrue(state[self.host][engine].has_key('ReadRate'))
            self.assertTrue(state[self.host][engine].has_key('WriteRate'))
        self.stopCluster()

    def test7_bypassParallelProcessing(self):
        """Test 7: Bypass Parallel Processing mode """
        simple_cluster.setDefaults(default_mem_per_engine=33554432)
        # Prepare MMS
        self.setUpFile("Four_ants_3C286.mms",'vis')
        # Create list file
        text = "mode='unflag'\n"\
               "mode='clip' clipminmax=[0,0.1]"
        filename = 'list_flagdata.txt'
        self.create_input(text, filename)
        # step 1: Do unflag+clip
        flagdata(vis=self.vis, mode='list', inpfile=filename)
        # step 2: Now do summary
        ret_dict = flagdata(vis=self.vis, mode='summary')
        # Check summary (expected flag counts per spw for this dataset)
        self.assertTrue(ret_dict['name']=='Summary')
        self.assertTrue(ret_dict['spw']['15']['flagged'] == 96284.0)
        self.assertTrue(ret_dict['spw']['0']['flagged'] == 129711.0)
        self.assertTrue(ret_dict['spw']['1']['flagged'] == 128551.0)
        self.assertTrue(ret_dict['spw']['2']['flagged'] == 125686.0)
        self.assertTrue(ret_dict['spw']['3']['flagged'] == 122862.0)
        self.assertTrue(ret_dict['spw']['4']['flagged'] == 109317.0)
        self.assertTrue(ret_dict['spw']['5']['flagged'] == 24481.0)
        self.assertTrue(ret_dict['spw']['6']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['7']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['8']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['9']['flagged'] == 27422.0)
        self.assertTrue(ret_dict['spw']['10']['flagged'] == 124638.0)
        self.assertTrue(ret_dict['spw']['11']['flagged'] == 137813.0)
        self.assertTrue(ret_dict['spw']['12']['flagged'] == 131896.0)
        self.assertTrue(ret_dict['spw']['13']['flagged'] == 125074.0)
        self.assertTrue(ret_dict['spw']['14']['flagged'] == 118039.0)
        # Remove MMS
        os.system('rm -rf ' + self.vis)
        # Restore default values
        simple_cluster.setDefaults(default_mem_per_engine=512)
        if not self.bypassParallelProcessing:
            ParallelTaskHelper.bypassParallelProcessing(0)

    def test8_IgnoreNullSelectionError(self):
        """Test 8: Check that NullSelection errors happening for some sub-MSs are ignored """
        """Note: In this test we also check simple_cluster initialization via ParallelTaskHelper """
        # Prepare MMS
        self.setUpFile("Four_ants_3C286.mms",'vis')
        # Unflag entire MMS
        flagdata(vis=self.vis, mode='unflag')
        # Manually flag scan 30
        flagdata(vis=self.vis, mode='manual', scan='30')
        # step 2: Now do summary
        ret_dict = flagdata(vis=self.vis, mode='summary')
        # Check summary
        self.assertTrue(ret_dict['scan']['30']['flagged'] == 2187264.0)
        self.assertTrue(ret_dict['scan']['31']['flagged'] == 0)
        # Stop cluster if it was started
        self.cluster = simple_cluster.getCluster()
        if (self.cluster != None):
            self.stopCluster()
        # Remove MMS
        os.system('rm -rf ' + self.vis)
def oldsplit(vis, outputvis, datacolumn, field, spw, width, antenna,
             timebin, timerange, scan, intent, array, uvrange,
             correlation, observation, combine, keepflags, keepmms):
    """Create a visibility subset from an existing visibility set:

    Keyword arguments:
    vis -- Name of input visibility file (MS)
            default: none; example: vis='ngc5921.ms'
    outputvis -- Name of output visibility file (MS)
                  default: none; example: outputvis='ngc5921_src.ms'
    datacolumn -- Which data column to split out
                  default='corrected'; example: datacolumn='data'
                  Options: 'data', 'corrected', 'model', 'all',
                  'float_data', 'lag_data', 'float_data,data', and
                  'lag_data,data'.
                  note: 'all' = whichever of the above that are present.
    field -- Field name
              default: field = '' means use all sources
              field = 1 # will get field_id=1 (if you give it an
                          integer, it will retrieve the source with that index)
              field = '1328+307' specifies source '1328+307'.
                 Minimum match can be used, egs  field = '13*' will
                 retrieve '1328+307' if it is unique or exists.
                 Source names with imbedded blanks cannot be included.
    spw -- Spectral window index identifier
            default=-1 (all); example: spw=1
    antenna -- antenna names
               default '' (all),
               antenna = '3 & 7' gives one baseline with antennaid = 3,7.
    timebin -- Interval width for time averaging.
               default: '0s' or '-1s' (no averaging)
               example: timebin='30s'
    timerange -- Time range
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange='< YYYY/MM/DD/HH:MM:SS.sss'
                 timerange='> YYYY/MM/DD/HH:MM:SS.sss'
                 timerange='< ddd/HH:MM:SS.sss'
                 timerange='> ddd/HH:MM:SS.sss'
    scan -- Scan numbers to select.
            default '' (all).
    intent -- Scan intents to select.
            default '' (all).
    array -- (Sub)array IDs to select.
             default '' (all).
    uvrange -- uv distance range to select.
               default '' (all).
    correlation -- Select correlations, e.g. 'rr, ll' or ['XY', 'YX'].
                   default '' (all).
    observation -- Select by observation ID(s).
                   default '' (all).
    combine -- Data descriptors that time averaging can ignore:
                  scan, and/or state
                  Default '' (none)
    keepflags -- Keep flagged data, if possible
                 Default True
    keepmms -- If the input is a multi-MS, make the output one, too. (experimental)
               Default: False
    """
    casalog.origin('oldsplit')
    # snapshot of the call arguments, passed on to ParallelTaskHelper below
    mylocals = locals()

    rval = True

    try:
        if (keepmms and ParallelTaskHelper.isParallelMS(vis)):
            # input is an MMS and an MMS is wanted as output:
            # split each subMS separately, then reassemble
            if (timebin!='0s' and timebin!='-1s'):
                casalog.post('Averaging over time with keepmms=True may lead to results different\n'
                             +' from those obtained with keepmms=False due to different binning.', 'WARN')

            myms = mstool()
            myms.open(vis)
            mses = myms.getreferencedtables()
            myms.close()
            mses.sort()

            nfail = 0
            if os.path.exists(outputvis):
                raise ValueError, "Output MS %s already exists - will not overwrite." % outputvis
            tempout = outputvis+str(time.time())
            os.mkdir(tempout)
            successfulmses = []
            mastersubms = ''
            masterptab = ''
            emptyptab = tempout+'/EMPTY_POINTING'
            # POINTING is unaffected by the split iff neither antenna nor
            # timerange selection was requested
            nochangeinpointing = (str(antenna)+str(timerange)=='')

            if nochangeinpointing:
                # resulting pointing table is the same for all
                #  -> replace by empty table if it is a link and won't be modified anyway
                #     and put back original into the master after split
                # find the master
                for m in mses:
                    theptab = m+'/POINTING'
                    if not os.path.islink(theptab):
                        #print "is master ", theptab
                        mastersubms = m
                        masterptab = m+'/POINTING'
                        # save time by not copying the POINTING table len(mses) times
                        myttb = tbtool()
                        myttb.open(masterptab)
                        tmpp = myttb.copy(newtablename=emptyptab, norows=True)
                        myttb.close()
                        del myttb
                        tmpp.close()
                        del tmpp
                        break

            mytb = tbtool()

            # prepare the input MMS for processing
            replaced = []
            outputviss = []
            theptabs = []

            for m in mses:
                # make sure the SORTED_TABLE keywords are disabled
                mytb.open(m, nomodify=False)
                if 'SORTED_TABLE' in mytb.keywordnames():
                    tobedel = mytb.getkeyword('SORTED_TABLE').split(' ')[1]
                    mytb.removekeyword('SORTED_TABLE')
                    os.system('rm -rf '+tobedel)
                mytb.close()

                # deal with the POINTING table
                theptab = m+'/POINTING'
                theptabs.append(theptab)
                if nochangeinpointing and os.path.islink(theptab):
                    #print "is link ", theptab
                    # swap the link for the empty stand-in table
                    os.remove(theptab)
                    shutil.copytree(emptyptab, theptab)
                    replaced.append(True)
                else:
                    replaced.append(False)

                # run oldsplit
                outputviss.append(os.path.abspath(tempout+'/'+os.path.basename(m)))
            # end for

            # send off the jobs
            print 'Running split_core ... '
            helper = ParallelTaskHelper('oldsplit', mylocals)
            helper.override_arg('outputvis',outputviss)
            # keep per-subMS return values separate
            helper._consolidateOutput = False
            goretval = helper.go()

            for i in xrange(len(mses)):
                m = mses[i]
                # deal with the POINTING table
                if replaced[i]:
                    # restore link
                    shutil.rmtree(theptabs[i], ignore_errors=True)
                    os.symlink('../'+os.path.basename(mastersubms)+'/POINTING', theptabs[i])
                    # (link in target will be created my makeMMS)
                # accumulate list of successful splits
                if not goretval[m]:
                    nfail+=1
                else:
                    successfulmses.append(outputviss[i])

            if nfail>0:  # there were unsuccessful splits
                if len(successfulmses)==0:
                    casalog.post('Split failed in all subMSs.', 'WARN')
                    rval=False
                else:
                    casalog.post('*** Summary: there were failures in '+str(nfail)+' SUBMSs:', 'WARN')
                    casalog.post('*** (these are harmless if they are caused by selection):', 'WARN')
                    for m in mses:
                        if not goretval[m]:
                            casalog.post(os.path.basename(m)+': '+str(goretval[m]), 'WARN')
                        else:
                            casalog.post(os.path.basename(m)+': '+str(goretval[m]), 'NORMAL')
                    casalog.post('Will construct MMS from subMSs with successful selection ...', 'NORMAL')

                    if nochangeinpointing:  # need to take care of POINTING table
                        # in case the master subms did not make it
                        if not (tempout+'/'+os.path.basename(mastersubms) in successfulmses):
                            # old master subms was not selected.
                            # copy the original masterptab into the new master
                            shutil.rmtree(successfulmses[0]+'/POINTING')
                            shutil.copytree(masterptab, successfulmses[0]+'/POINTING')

            if rval:  # construct new MMS from the output
                # if no selection/averaging was requested, the subMSs can be
                # combined directly; otherwise they must be virtually
                # concatenated first to reconcile subtable indices
                if(width==1 and str(field)+str(spw)+str(antenna)+str(timerange)+str(scan)+str(intent)\
                   +str(array)+str(uvrange)+str(correlation)+str(observation)==''):
                    ph.makeMMS(outputvis, successfulmses)
                else:
                    myms.open(successfulmses[0], nomodify=False)
                    auxfile = "split_aux_"+str(time.time())
                    for i in xrange(1,len(successfulmses)):
                        myms.virtconcatenate(successfulmses[i], auxfile, '1Hz', '10mas', True)
                    myms.close()
                    os.remove(auxfile)
                    ph.makeMMS(outputvis, successfulmses, True, ['POINTING'])

            shutil.rmtree(tempout, ignore_errors=True)

        else:  # do not output an MMS
            rval = split_core(vis, outputvis, datacolumn, field, spw, width, antenna,
                              timebin, timerange, scan, intent, array, uvrange,
                              correlation, observation, combine, keepflags)

    except Exception, instance:
        casalog.post("*** Error: %s" % (instance), 'SEVERE')
        rval = False
def postExecution(self):
    '''
    This overrides the post execution portion of the task helper
    in this case we probably need to generate the output reference ms.
    '''
    if self._arg['createmms']:
        casalog.post("Finalizing MMS structure")

        # restore POINTING and SYSCAL
        # (the originals were parked in self.dataDir while an empty copy
        # stood in for them during the parallel run — see pre-execution)
        if self.pwriteaccess and not self.pointingisempty:
            print "restoring POINTING"
            os.system('rm -rf '+self.ptab)  # remove empty copy
            os.system('mv '+self.dataDir+'/POINTING '+self.ptab)
        if self.swriteaccess and not self.syscalisempty:
            print "restoring SYSCAL"
            os.system('rm -rf '+self.stab)  # remove empty copy
            os.system('mv '+self.dataDir+'/SYSCAL '+self.stab)

        # jagonzal (CAS-4287): Add a cluster-less mode to by-pass parallel
        # processing for MMSs as requested
        if (ParallelTaskHelper.getBypassParallelProcessing()==1):
            outputList = self._sequential_return_list
            self._sequential_return_list = {}
        else:
            outputList = self._jobQueue.getOutputJobs()

        # We created a data directory and many SubMSs,
        # now build the reference MS
        if self._arg['calmsselection'] in ['auto','manual']:
            # A Cal MS was created in the data directory, see if it was
            # successful, if so build a multi-MS
            if os.path.exists(self._arg['calmsname']):
                raise ValueError, "Output MS already exists"
            self._msTool.createmultims(self._arg['calmsname'],
                                       [self.dataDir+'/%s.cal.ms'%self.outputBase])

        # collect the subMS names; in bypass mode the list already holds
        # them, otherwise extract from the finished jobs
        subMSList = []
        if (ParallelTaskHelper.getBypassParallelProcessing()==1):
            for subMS in outputList:
                subMSList.append(subMS)
        else:
            for job in outputList:
                if job.status == 'done':
                    subMSList.append(job.getCommandArguments()['outputvis'])
        subMSList.sort()

        if len(subMSList) == 0:
            casalog.post("Error: no subMSs were created.", 'WARN')
            return False

        mastersubms = subMSList[0]
        subtabs_to_omit = []

        # deal with POINTING table
        if not self.pointingisempty:
            shutil.rmtree(mastersubms+'/POINTING', ignore_errors=True)
            # master subms gets a full copy of the original
            shutil.copytree(self.ptab, mastersubms+'/POINTING')
        if self.makepointinglinks:
            for i in xrange(1,len(subMSList)):
                theptab = subMSList[i]+'/POINTING'
                shutil.rmtree(theptab, ignore_errors=True)
                os.symlink('../'+os.path.basename(mastersubms)+'/POINTING', theptab)
                # (link in target will be created my makeMMS)
            subtabs_to_omit.append('POINTING')

        # deal with SYSCAL table
        if not self.syscalisempty:
            shutil.rmtree(mastersubms+'/SYSCAL', ignore_errors=True)
            # master subms gets a full copy of the original
            shutil.copytree(self.stab, mastersubms+'/SYSCAL')
        if self.makesyscallinks:
            for i in xrange(1,len(subMSList)):
                thestab = subMSList[i]+'/SYSCAL'
                shutil.rmtree(thestab, ignore_errors=True)
                os.symlink('../'+os.path.basename(mastersubms)+'/SYSCAL', thestab)
                # (link in target will be created my makeMMS)
            subtabs_to_omit.append('SYSCAL')

        ph.makeMMS(self._arg['outputvis'], subMSList,
                   True,  # copy subtables
                   subtabs_to_omit  # omitting these
                   )

        # the temporary directory that held the subMSs is now empty
        thesubmscontainingdir = os.path.dirname(subMSList[0].rstrip('/'))
        os.rmdir(thesubmscontainingdir)

    return True
def concat(vislist,concatvis,freqtol,dirtol,respectname,timesort,copypointing,
           visweightscale, forcesingleephemfield):
    """concatenate visibility datasets

    The list of data sets given in the vis argument are chronologically
    concatenated into an output data set in concatvis, i.e. the data sets
    in vis are first ordered by the time of their earliest integration
    and then concatenated.

    If there are fields whose direction agrees within the direction
    tolerance (parameter dirtol), the actual direction in the resulting,
    merged output field will be the one from the chronologically first
    input MS.

    If concatvis already exists (e.g., it is the same as the first input
    data set), then the other input data sets will be appended to the
    concatvis data set.  There is no limit to the number of input data
    sets.

    If none of the input data sets have any scratch columns (model and
    corrected columns), none are created in the concatvis.  Otherwise
    these columns are created on output and initialized to their default
    value (1 in model column, data in corrected column) for those data
    with no input columns.

    Spectral windows for each data set with the same channelization, and
    within a specified frequency tolerance of another data set will be
    combined into one spectral window.

    A field position in one data set that is within a specified direction
    tolerance of another field position in any other data set will be
    combined into one field.  The field names need not be the same---only
    their position is used.

    Each appended dataset is assigned a new observation id (provided the
    entries in the observation table are indeed different).

    Keyword arguments:
    vis -- Name of input visibility files to be combined
            default: none; example: vis = ['src2.ms','ngc5921.ms','ngc315.ms']
    concatvis -- Name of visibility file that will contain the
            concatenated data
            note: if this file exists on disk then the input files are
            added to this file.  Otherwise the new file contains the
            concatenated data.  Be careful here when concatenating to an
            existing file.
            default: none; example: concatvis='src2.ms'
            example: concatvis='outvis.ms'
            other examples:
               concat(vis=['src2.ms','ngc5921.ms'], concatvis='src2.ms')
                 will concatenate 'ngc5921.ms' into 'src2.ms', and the
                 original src2.ms is lost
               concat(vis=['src2.ms','ngc5921.ms'], concatvis='out.ms')
                 will concatenate 'ngc5921.ms' and 'src2.ms' into a file
                 named 'out.ms'; the original 'ngc5921.ms' and 'src2.ms'
                 are untouched.
               concat(vis=['v1.ms','v2.ms'], concatvis = 'vall.ms')
                 then
               concat(vis=['v3.ms','v4.ms'], concatvis = 'vall.ms')
                 vall.ms will contain v1.ms+v2.ms+v3.ms+v4.ms
            Note: run flagmanager to save flags in the concatvis
    freqtol -- Frequency shift tolerance for considering data to be in
            the same spwid.  The number of channels must also be the same.
            default: '' == 1 Hz
            example: freqtol='10MHz' will not combine spwid unless they
            are within 10 MHz.
            Note: This option is useful to combine spectral windows with
            very slight frequency differences caused by Doppler tracking,
            for example.
    dirtol -- Direction shift tolerance for considering data as the same
            field
            default: '' == 1 mas (milliarcsec)
            example: dirtol='1.arcsec' will not combine data for a field
            unless their phase center differ by less than 1 arcsec.  If
            the field names are different in the input data sets, the
            name in the output data set will be the first relevant data
            set in the list.
    respectname -- If true, fields with a different name are not merged
            even if their direction agrees (within dirtol)
            default: False
    timesort -- If true, the output visibility table will be sorted in
            time.
            default: false.  Data in order as read in.
            example: timesort=true
            Note: There is no constraint on data that is simultaneously
            observed for more than one field; for example multi-source
            correlation of VLBA data.
    copypointing -- Make a proper copy of the POINTING subtable (can be
            time consuming).  If False, the result is an empty POINTING
            table.
            default: True
    visweightscale -- The weights of the individual MSs will be scaled in
            the concatenated output MS by the factors in this list.
            Useful for handling heterogeneous arrays.  Use plotms to
            inspect the "Wt" column as a reference for determining the
            scaling factors.  See the cookbook for more details.
            example: [1.,3.,3.] - scale the weights of the second and
            third MS by a factor 3.
            default: [] (empty list) - no scaling
    forcesingleephemfield -- By default, concat will only merge two
            ephemeris fields if the first ephemeris covers the time range
            of the second.  Otherwise, two separate fields with separate
            ephemerides are placed in the output MS.  In order to
            override this behaviour and make concat merge the
            non-overlapping or only partially overlapping input
            ephemerides, the name or id of the field in question needs to
            be placed into the list in parameter
            'forcesingleephemfield'.
            example: ['Neptune'] - will make sure that there is only one
            joint ephemeris for field Neptune in the output MS
            default: '' - standard treatment of all ephemeris fields
    """

    ###
    #Python script
    try:
        casalog.origin('concat')
        t = tbtool()
        m = mstool()

        #break the reference between vis and vislist as we modify vis
        if(type(vislist)==str):
            vis=[vislist]
        else:
            vis=list(vislist)
        #dto. for concavis
        theconcatvis = concatvis

        # warn if there are MMSs (rows cannot be appended to a multi-MS)
        mmslist = []
        for elvis in vis : ###Oh no Elvis does not exist Mr Bill
            if(ParallelTaskHelper.isParallelMS(elvis)):
                mmslist.append(elvis)
        if len(mmslist)>0:
            if (vis[0] == mmslist[0]):
                casalog.post('*** The first input MS is a multi-MS to which no row can be added. Cannot proceed.', 'WARN')
                casalog.post('*** Please use virtualconcat or convert the first input MS to a normal MS using split.', 'WARN')
                raise Exception, 'Cannot append to a multi-MS. Please use virtualconcat.'
            casalog.post('*** The following input measurement sets are multi-MSs', 'INFO')
            for mname in mmslist:
                casalog.post('*** '+mname, 'INFO')
            casalog.post('*** Use virtualconcat to produce a single multi-MS from several multi-MSs.', 'INFO')

        # validate visweightscale; scaling only actually happens when at
        # least one factor differs from 1.
        doweightscale = False
        if(len(visweightscale)>0):
            if (len(visweightscale) != len(vis)):
                raise Exception, 'parameter visweightscale must have same number of elements as parameter vis'
            for factor in visweightscale:
                if factor<0.:
                    raise Exception, 'parameter visweightscale must only contain positive numbers'
                elif factor!=1.:
                    doweightscale=True

        # process the input MSs in chronological order:
        # tag each MS with its earliest TIME value, then sort by it
        sortedvis = []
        sortedvisweightscale = []
        namestuples = []
        for name in vis:
            t.open(name)
            times = t.getcol('TIME')
            t.close()
            times.sort()
            if doweightscale:
                namestuples.append( (times[0], name, visweightscale[vis.index(name)]) )
            else:
                # weight slot unused when no scaling is requested
                namestuples.append( (times[0], name, 0) )

        sorted_namestuples = sorted(namestuples, key=lambda msname: msname[0])

        for i in range(0,len(vis)):
            sortedvis.append(sorted_namestuples[i][1])
            sortedvisweightscale.append(sorted_namestuples[i][2])

        if((type(concatvis)!=str) or (len(concatvis.split()) < 1)):
            raise Exception, 'parameter concatvis is invalid'

        # if concatvis is one of the inputs, it must be the earliest one;
        # remove it from the sorted work list (it is the append target)
        existingconcatvis = False
        if(vis.count(concatvis) > 0):
            existingconcatvis = True
            cvisindex = sortedvis.index(concatvis)
            if not sorted_namestuples[cvisindex][0] == sorted_namestuples[0][0]:
                raise Exception, 'If concatvis is set to the name of an existing MS in vis, it must be the chronologically first.'+\
                    '\n I.e. in this case you should set concatvis to '+sortedvis[0]
            sortedvis.pop(cvisindex)
            if doweightscale:
                vwscale = sortedvisweightscale[cvisindex]
                sortedvisweightscale.pop(cvisindex)
                sortedvisweightscale = [vwscale] + sortedvisweightscale # move the corresponding weight to the front

        if not vis == sortedvis:
            casalog.post('The list of input MSs is not in chronological order and will need to be sorted.' , 'INFO')
            casalog.post('The chronological order in which the concatenation will take place is:' , 'INFO')
            if existingconcatvis:
                casalog.post(' MJD '+str(qa.splitdate(qa.quantity(sorted_namestuples[0][0],'s'))['mjd'])+': '+concatvis, 'INFO')
            for name in sortedvis:
                casalog.post(' MJD '+str(qa.splitdate(qa.quantity(sorted_namestuples[sortedvis.index(name)][0],'s'))['mjd'])+': '+name, 'INFO')
            if doweightscale:
                casalog.post('In this new order, the weights are:'+str(sortedvisweightscale) , 'INFO')

        # replace the original vis and visweightscale by the sorted ones
        # (with concatvis removed if it exists)
        vis = sortedvis
        visweightscale = sortedvisweightscale

        if(os.path.exists(concatvis)):
            # appending to an existing output MS
            casalog.post('Will be concatenating into the existing ms '+concatvis , 'WARN')
            if doweightscale and not existingconcatvis:
                visweightscale = [1.]+visweightscale # set the weight for this existing MS to 1.
                casalog.post('The weights for this existing MS will be left unchanged.' , 'WARN')
        else:
            if(len(vis) >0): # (note: in case len is 1, we only copy, essentially)
                casalog.post('copying '+vis[0]+' to '+theconcatvis , 'INFO')
                shutil.copytree(vis[0], theconcatvis)
                # note that the resulting copy is writable even if the
                # original was read-only
                vis.pop(0)
                # don't need to pop visweightscale here!

        if not copypointing:
            # remove the rows from the POINTING table of the first MS:
            # move it aside, copy its structure back without rows
            casalog.post('*** copypointing==False: resulting MS will have empty POINTING table.', 'INFO')
            tmptabname = 'TMPPOINTING'+str(time.time())
            shutil.rmtree(tmptabname, ignore_errors=True)
            shutil.move(theconcatvis+'/POINTING', tmptabname)
            t.open(tmptabname)
            if(t.nrows()>0):
                ttab = t.copy(newtablename=theconcatvis+'/POINTING', deep=False, valuecopy=True, norows=True)
                ttab.close()
                t.close()
                shutil.rmtree(tmptabname, ignore_errors=True)
            else: # the POINTING table is already empty
                casalog.post('*** Input POINTING table was already empty.', 'INFO')
                # NOTE(review): the table is moved back while still open
                # and closed afterwards — works in practice but the
                # close-before-move order would be cleaner; confirm.
                shutil.move(tmptabname, theconcatvis+'/POINTING')
                t.close()

        # handle the ephemeris concatenation
        if not forcesingleephemfield=='':
            # deferred import: only needed for the ephemeris merge
            from recipes.ephemerides import concatephem

            if type(forcesingleephemfield)==str or type(forcesingleephemfield)==int:
                forcesingleephemfield = [forcesingleephemfield]
            if not type(forcesingleephemfield) == list:
                raise Exception, 'Type of parameter forcesingleephemfield must be str, int, or list'

            themss = [theconcatvis]
            for x in vis:
                themss.append(x)

            for ephemfield in forcesingleephemfield:
                if not type(ephemfield)==int:
                    ephemfield = str(ephemfield)
                casalog.post('*** Forcing single ephemeris for field '+str(ephemfield), 'INFO')
                thetabs = concatephem.findephems(themss, ephemfield)
                if thetabs != [] and not ('' in thetabs):
                    # merge all per-MS ephemerides into one and install it
                    # in the output FIELD subtable
                    tmptab = os.path.basename(thetabs[0])+'.concattmp'
                    targettab = theconcatvis+'/FIELD/'+os.path.basename(thetabs[0])
                    if not os.path.exists(targettab):
                        raise Exception, 'Internal ERROR: ephemeris '+targettab+' does not exist'
                    concatephem.concatephem(thetabs, tmptab)
                    if os.path.exists(tmptab):
                        os.system('rm -rf '+targettab)
                        os.system('mv '+tmptab+' '+targettab)
                    else:
                        casalog.post('ERROR while forcing single ephemeris for field '+str(ephemfield), 'SEVERE')
                        raise Exception, 'Concatenation of ephemerides for field '+str(ephemfield)+' failed.'
                else:
                    casalog.post('ERROR while forcing single ephemeris for field '+str(ephemfield), 'SEVERE')
                    raise Exception, 'Cannot find ephemerides for field '+str(ephemfield)+' in all input MSs.'

        # Determine if scratch columns should be considered at all
        # by checking if any of the MSs has them.
        considerscrcols = False
        considercorr = False
        considermodel = False
        needscrcols = []   # per-MS: True if MODEL or CORRECTED is missing
        needmodel = []     # per-MS: True if MODEL_DATA is missing
        needcorr = []      # per-MS: True if CORRECTED_DATA is missing
        if ((type(theconcatvis)==str) and (os.path.exists(theconcatvis))):
            # check if all scratch columns are present
            t.open(theconcatvis)
            if (t.colnames().count('MODEL_DATA')==1):
                considermodel = True
            if(t.colnames().count('CORRECTED_DATA')==1):
                considercorr = True
            needscrcols.append(t.colnames().count('CORRECTED_DATA')==0 or t.colnames().count('MODEL_DATA')==0)
            needmodel.append(t.colnames().count('MODEL_DATA')==0)
            needcorr.append(t.colnames().count('CORRECTED_DATA')==0)
            t.close()
        else:
            raise Exception, 'Visibility data set '+theconcatvis+' not found - please verify the name'

        for elvis in vis : ###Oh no Elvis does not exist Mr Bill
            if(not os.path.exists(elvis)):
                raise Exception, 'Visibility data set '+elvis+' not found - please verify the name'
            # check if all scratch columns are present
            t.open(elvis)
            if (t.colnames().count('MODEL_DATA')==1):
                considermodel = True
            if(t.colnames().count('CORRECTED_DATA')==1):
                considercorr = True
            needscrcols.append(t.colnames().count('CORRECTED_DATA')==0 or t.colnames().count('MODEL_DATA')==0)
            needmodel.append(t.colnames().count('MODEL_DATA')==0)
            needcorr.append(t.colnames().count('CORRECTED_DATA')==0)
            t.close()

        considerscrcols = (considercorr or considermodel) # there are scratch columns

        # start actual work, file existence has already been checked
        i = 0
        if(considerscrcols and needscrcols[i]):
            # create scratch cols
            casalog.post('creating scratch columns in '+theconcatvis , 'INFO')
            _cb.open(theconcatvis, addcorr=(considercorr and needcorr[i]), addmodel=(considermodel and needmodel[i])) # calibrator-open creates scratch columns
            _cb.close()

        # scale the weights and sigma of the first MS in the chain
        # (the remaining MSs are scaled by m.concatenate below)
        if doweightscale:
            wscale = visweightscale[i]
            if(wscale==1.):
                casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
            else:
                casalog.post('Scaling weights for first MS by factor '+str(wscale), 'INFO')
                t.open(theconcatvis, nomodify=False)
                for colname in [ 'WEIGHT', 'WEIGHT_SPECTRUM']:
                    if (colname in t.colnames()) and (t.iscelldefined(colname,0)):
                        for j in xrange(0,t.nrows()):
                            a = t.getcell(colname, j)
                            a *= wscale
                            t.putcell(colname, j, a)
                # SIGMA scales with 1/sqrt(weight factor)
                for colname in ['SIGMA']:
                    if (wscale > 0. and colname in t.colnames()) and (t.iscelldefined(colname,0)):
                        sscale = 1./sqrt(wscale)
                        for j in xrange(0,t.nrows()):
                            a = t.getcell(colname, j)
                            a *= sscale
                            t.putcell(colname, j, a)
                t.close()

        # determine handling switch value
        # (handling=2 tells ms.concatenate to omit POINTING rows;
        #  presumably matches the tool's 'handling' codes — see ms tool docs)
        handlingswitch = 0
        if not copypointing:
            handlingswitch = 2

        m.open(theconcatvis,nomodify=False)
        mmsmembers = [theconcatvis]

        for elvis in vis :
            i = i + 1
            destms = ""
            casalog.post('concatenating '+elvis+' into '+theconcatvis , 'INFO')

            wscale = 1.
            if doweightscale:
                wscale = visweightscale[i]
                if(wscale==1.):
                    casalog.post('Will leave the weights for this MS unchanged.', 'INFO')
                else:
                    casalog.post('Will scale weights for this MS by factor '+str(wscale) , 'INFO')

            if(considerscrcols and needscrcols[i]):
                if(ParallelTaskHelper.isParallelMS(elvis)):
                    raise Exception, 'Cannot create scratch columns in a multi-MS. Use virtualconcat.'
                else:
                    # create scratch cols in a temporary copy so the
                    # original input MS stays unmodified
                    casalog.post('creating scratch columns for '+elvis+' (original MS unchanged)', 'INFO')
                    tempname = elvis+'_with_scrcols'
                    shutil.rmtree(tempname, ignore_errors=True)
                    shutil.copytree(elvis, tempname)
                    _cb.open(tempname, addcorr=(considercorr and needcorr[i]), addmodel=(considermodel and needmodel[i])) # calibrator-open creates scratch columns
                    _cb.close()
                    # concatenate copy instead of original file
                    m.concatenate(msfile=tempname,freqtol=freqtol,dirtol=dirtol,respectname=respectname,
                                  weightscale=wscale,handling=handlingswitch,
                                  destmsfile=destms)
                    shutil.rmtree(tempname, ignore_errors=True)
            else:
                m.concatenate(msfile=elvis,freqtol=freqtol,dirtol=dirtol,respectname=respectname,
                              weightscale=wscale,handling=handlingswitch,
                              destmsfile=destms)

        if timesort:
            casalog.post('Sorting main table by TIME ...', 'INFO')
            m.timesort()

        # record the task invocation in the MS history
        m.writehistory(message='taskname=concat',origin='concat')
        m.writehistory(message='vis = "'+str(vis)+'"',origin='concat')
        m.writehistory(message='concatvis = "'+str(concatvis)+'"',origin='concat')
        m.writehistory(message='freqtol = "'+str(freqtol)+'"',origin='concat')
        m.writehistory(message='dirtol = "'+str(dirtol)+'"',origin='concat')
        m.writehistory(message='respectname = "'+str(respectname)+'"',origin='concat')
        m.writehistory(message='copypointing = "'+str(copypointing)+'"',origin='concat')
        m.writehistory(message='visweightscale = "'+str(visweightscale)+'"',origin='concat')
        m.writehistory(message='forcesingleephemfield = "'+str(forcesingleephemfield)+'"',origin='concat')

        m.close()

        return True

    except Exception, instance:
        print '*** Error ***',instance
        raise Exception, instance
raise Exception, 'Tsys calibration table %s not found' % tsystable if len(spwmap)==0: spwmap=[-1] if interp=="": interp="linear" # ... and we are asked to do something... # open without adding anything! mycb.open(vis,compress=False,addcorr=False,addmodel=False) mycb.initweights(wtmode=wtmode,dowtsp=dowtsp,tsystable=tsystable,gainfield=gainfield,interp=interp,spwmap=spwmap) mycb.close() else: raise Exception, 'Visibility data set not found - please verify the name' # Write history to MS. # When running in parallel, history will be written in the parallel section above # normal MSs should write the history here if ParallelTaskHelper.isMPIClient(): try: param_names = initweights.func_code.co_varnames[:initweights.func_code.co_argcount] param_vals = [eval(p) for p in param_names] casalog.post('Updating the history in the output', 'DEBUG1') write_history(myms, vis, 'initweights', param_names, param_vals, casalog) except Exception, instance: casalog.post("*** Error \'%s\' updating HISTORY" % (instance), 'WARN') except Exception, instance: print '*** Error ***',instance
def applycal( vis=None, field=None, spw=None, intent=None, selectdata=None, timerange=None, uvrange=None, antenna=None, scan=None, observation=None, msselect=None, docallib=None, callib=None, gaintable=None, gainfield=None, interp=None, spwmap=None, calwt=None, parang=None, applymode=None, flagbackup=None, ): # Python script casalog.origin("applycal") # Take care of the trivial parallelization if ParallelTaskHelper.isParallelMS(vis): # Back up the flags, if requested (and if necessary) if flagbackup and applymode != "calonly" and applymode != "trial": fh.backupFlags(aflocal=None, msfile=vis, prename="applycal") flagbackup = False # To be safe convert file names to absolute paths. gaintable = ParallelTaskHelper.findAbsPath(gaintable) helper = ParallelTaskHelper("applycal", locals()) ret = helper.go() if ParallelTaskHelper.getAsyncMode(): return ret else: return try: mycb = cbtool() if (type(vis) == str) & os.path.exists(vis): # add CORRECTED_DATA column mycb.open(filename=vis, compress=False, addcorr=True, addmodel=False) else: raise Exception, "Visibility data set not found - please verify the name" # enforce default if unspecified if applymode == "": applymode = "calflag" # Back up the flags, if requested (and if necessary) if flagbackup and applymode != "calonly" and applymode != "trial": fh.backupFlags(aflocal=None, msfile=vis, prename="applycal") # Do data selection according to selectdata if selectdata: # pass all data selection parameters in as specified mycb.selectvis( time=timerange, spw=spw, scan=scan, field=field, intent=intent, observation=str(observation), baseline=antenna, uvrange=uvrange, chanmode="none", msselect=msselect, ) else: # selectdata=F, so time,scan,baseline,uvrange,msselect='' # using spw and field specifications only mycb.selectvis( time="", spw=spw, scan="", field=field, intent=intent, observation="", baseline="", uvrange="", chanmode="none", msselect="", ) # Arrange applies.... 
if docallib: # by cal library from file mycallib = callibrary() mycallib.read(callib) mycb.setcallib(mycallib.cld) else: # by traditional parameters ngaintab = 0 if gaintable != [""]: ngaintab = len(gaintable) ncalwt = len(calwt) if ncalwt == 1: calwt = [calwt[0] for i in range(ngaintab)] ngainfld = len(gainfield) nspwmap = len(spwmap) ninterp = len(interp) # handle list of list issues with spwmap if nspwmap > 0: if type(spwmap[0]) != list: # first element not a list, only one spwmap specified # make it a list of list spwmap = [spwmap] nspwmap = 1 for igt in range(ngaintab): if gaintable[igt] != "": # field selection is null unless specified thisgainfield = "" if igt < ngainfld: thisgainfield = gainfield[igt] # spwmap is null unless specifed thisspwmap = [-1] if igt < nspwmap: thisspwmap = spwmap[igt] # interp is 'linear' unless specified thisinterp = "linear" if igt < ninterp: if interp[igt] == "": interp[igt] = thisinterp thisinterp = interp[igt] mycb.setapply( t=0.0, table=gaintable[igt], field=thisgainfield, calwt=calwt[igt], spwmap=thisspwmap, interp=thisinterp, ) # ...and now the specialized terms # Apply parallactic angle, if requested if parang: mycb.setapply(type="P") mycb.correct(applymode) # report what the flags did reportflags(mycb.activityrec()) mycb.close() # write history try: param_names = applycal.func_code.co_varnames[: applycal.func_code.co_argcount] param_vals = [eval(p) for p in param_names] write_history(mstool(), vis, "applycal", param_names, param_vals, casalog) except Exception, instance: casalog.post("*** Error '%s' updating HISTORY" % instance, "WARN") except Exception, instance: print "*** Error ***", instance mycb.close() casalog.post("Error in applycal: %s" % str(instance), "SEVERE") raise Exception, "Error in applycal: " + str(instance)
# Path for data datapath = os.environ.get('CASAPATH').split()[0] + "/data/regression/unittest/flagdata/" # Pick up alternative data directory to run tests on MMSs testmms = False if os.environ.has_key('TEST_DATADIR'): DATADIR = str(os.environ.get('TEST_DATADIR'))+'/flagdata/' if os.path.isdir(DATADIR): testmms = True datapath = DATADIR print 'flagmanager tests will use data from '+datapath # jagonzal (CAS-4287): Add a cluster-less mode to by-pass parallel processing for MMSs as requested if os.environ.has_key('BYPASS_PARALLEL_PROCESSING'): ParallelTaskHelper.bypassParallelProcessing(1) # Local copy of the agentflagger tool aflocal = aftool() # Base class which defines setUp functions for importing different data sets class test_base(unittest.TestCase): def setUp_flagdatatest(self): '''VLA data set, scan=2500~2600 spw=0 1 chan, RR,LL''' self.vis = "flagdatatest.ms" if testmms: self.vis = "flagdatatest.mms" if os.path.exists(self.vis): print "The MS is already around, just unflag"
def applycal(
    vis=None,
    field=None,
    spw=None,
    intent=None,
    selectdata=None,
    timerange=None,
    uvrange=None,
    antenna=None,
    scan=None,
    observation=None,
    msselect=None,
    docallib=None,
    callib=None,
    gaintable=None,
    gainfield=None,
    interp=None,
    spwmap=None,
    calwt=None,
    parang=None,
    applymode=None,
    flagbackup=None,
    ):
    """Apply calibration solutions to a measurement set.

    Opens vis with the calibrater tool (adding CORRECTED_DATA if absent),
    selects the requested data, registers the apply terms — either from a
    cal library file parsed by the C++ parser (docallib=True) or from the
    traditional gaintable/gainfield/interp/spwmap/calwt lists — and runs
    the correction.  Multi-MS input is delegated to ParallelTaskHelper.
    On failure the error is logged and re-raised.
    """

    # Python script
    casalog.origin('applycal')

    # Take care of the trivial parallelization
    if ParallelTaskHelper.isParallelMS(vis):
        # Back up the flags, if requested (and if necessary)
        # (done once here so the per-SubMS jobs do not repeat it)
        if flagbackup and applymode != 'calonly' and applymode != 'trial':
            fh.backupFlags(aflocal=None, msfile=vis, prename='applycal')
            flagbackup = False
        # To be safe convert file names to absolute paths.
        gaintable = ParallelTaskHelper.findAbsPath(gaintable)
        helper = ParallelTaskHelper('applycal', locals())
        ret = helper.go()
        if ParallelTaskHelper.getAsyncMode():
            return ret
        else:
            return

    try:
        mycb = cbtool()

        if (type(vis) == str) & os.path.exists(vis):
            # add CORRECTED_DATA column
            mycb.open(filename=vis, compress=False, addcorr=True,
                      addmodel=False)
        else:
            raise Exception, \
                'Visibility data set not found - please verify the name'

        # enforce default if unspecified
        if applymode == '':
            applymode = 'calflag'

        # Back up the flags, if requested (and if necessary)
        if flagbackup and applymode != 'calonly' and applymode \
            != 'trial':
            fh.backupFlags(aflocal=None, msfile=vis, prename='applycal')

        # Do data selection according to selectdata
        if selectdata:
            # pass all data selection parameters in as specified
            mycb.selectvis(
                time=timerange,
                spw=spw,
                scan=scan,
                field=field,
                intent=intent,
                observation=str(observation),
                baseline=antenna,
                uvrange=uvrange,
                chanmode='none',
                msselect=msselect,
                )
        else:
            # selectdata=F, so time,scan,baseline,uvrange,msselect=''
            # using spw and field specifications only
            mycb.selectvis(
                time='',
                spw=spw,
                scan='',
                field=field,
                intent=intent,
                observation='',
                baseline='',
                uvrange='',
                chanmode='none',
                msselect='',
                )

        # Arrange applies....
        if docallib:
            # by cal library from file
            # parsing using c++ parser
            thiscallib=mycb.parsecallibfile(callib)
            mycb.setcallib(thiscallib)
        else:
            # by traditional parameters
            ngaintab = 0
            if gaintable != ['']:
                ngaintab = len(gaintable)

            # replicate a single calwt value across all tables
            ncalwt = len(calwt)
            if ncalwt == 1:
                calwt = [calwt[0] for i in range(ngaintab)]

            ngainfld = len(gainfield)
            nspwmap = len(spwmap)
            ninterp = len(interp)

            # handle list of list issues with spwmap
            if nspwmap > 0:
                if type(spwmap[0]) != list:
                    # first element not a list, only one spwmap specified
                    # make it a list of list
                    spwmap = [spwmap]
                    nspwmap = 1

            for igt in range(ngaintab):
                if gaintable[igt] != '':

                    # field selection is null unless specified
                    thisgainfield = ''
                    if igt < ngainfld:
                        thisgainfield = gainfield[igt]

                    # spwmap is null unless specified
                    thisspwmap = [-1]
                    if igt < nspwmap:
                        thisspwmap = spwmap[igt]

                    # interp is 'linear' unless specified
                    # (empty entries are normalized in the interp list
                    #  itself, i.e. the caller's list is mutated)
                    thisinterp = 'linear'
                    if igt < ninterp:
                        if interp[igt] == '':
                            interp[igt] = thisinterp
                        thisinterp = interp[igt]

                    mycb.setapply(
                        t=0.0,
                        table=gaintable[igt],
                        field=thisgainfield,
                        calwt=calwt[igt],
                        spwmap=thisspwmap,
                        interp=thisinterp,
                        )

        # ...and now the specialized terms
        # Apply parallactic angle, if requested
        if parang:
            mycb.setapply(type='P')

        mycb.correct(applymode)

        # report what the flags did
        reportflags(mycb.activityrec())

        mycb.close()

        # write history (best effort; a failure here only warns)
        try:
            param_names = \
                applycal.func_code.co_varnames[:applycal.func_code.co_argcount]
            param_vals = [eval(p) for p in param_names]
            write_history(
                mstool(),
                vis,
                'applycal',
                param_names,
                param_vals,
                casalog,
                )
        except Exception, instance:
            casalog.post("*** Error \'%s\' updating HISTORY" % instance,
                         'WARN')
    except Exception, instance:
        print '*** Error ***', instance
        mycb.close()
        casalog.post("Error in applycal: %s" % str(instance), "SEVERE")
        raise Exception, "Error in applycal: "+str(instance)