Code example #1
class test_mpi4casa_plotms(unittest.TestCase):

    def setUp(self):
        
        self.vis = 'Four_ants_3C286.mms'
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
    
    def test_mpi4casa_plotms_concurrent(self):
        """Run plotms on the same MS from each server simulateneously"""
        
        # Change current working directory
        self.client.push_command_request("os.chdir('%s')" % os.getcwd(),True,self.server_list)
        
        # Farm plotms jobs
        command_request_id_list = []
        for server in self.server_list:
            plotfile = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server)
            cmd = "plotms('%s', avgchannel='8',avgtime='60',plotfile='%s',showgui=False)" % (self.vis,plotfile)
            command_request_id = self.client.push_command_request(cmd,False,server)
            command_request_id_list.append(command_request_id[0])
            
        # Get responses in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Compare files
        for server_idx in range(0,len(self.server_list)):
            for server_idy in range(server_idx+1,len(self.server_list)):
                server_x = self.server_list[server_idx]
                server_y = self.server_list[server_idy]
                plotfile_server_idx = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server_x)
                plotfile_server_idy = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server_y)
                areEqual = filecmp.cmp(plotfile_server_idx,plotfile_server_idy)
                self.assertTrue(areEqual,"Plotfile generated by server %s is different from plotfile generated by server %s" 
                                % (str(server_x),str(server_y)))
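
The test above captures the core mpi4casa idiom these examples rely on: queue one command per server with push_command_request() in non-blocking mode, then collect every result with a single blocking get_command_response() call. Below is a minimal sketch of just that idiom, stripped of the plotms details (assumptions: an MPI-enabled CASA session with mpi4casa servers available; the import path follows CASA 5.x and may differ in other releases; "1+1" is a placeholder command).

from mpi4casa.MPICommandClient import MPICommandClient

client = MPICommandClient()
client.set_log_mode('redirect')
client.start_services()

# Build the list of responsive server ranks, as in setUp() above
status = client.get_server_status()
servers = [status[s]['rank'] for s in status if not status[s]['timeout']]

# Queue one command per server in non-blocking mode...
request_ids = [client.push_command_request("1+1", False, server)[0] for server in servers]

# ...then gather all responses in one blocking call
for response in client.get_command_response(request_ids, True, True):
    assert response['successful'] and response['ret'] == 2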
Code example #2
class test_mpi4casa_log_level(unittest.TestCase):

    def setUp(self):
        
        self.vis = "Four_ants_3C286.mms"
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          
                
        self.client.push_command_request("import os",True,self.server_list)

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
        
        # Restore log file and level
        self.client.push_command_request("casalog.setlogfile(casa['files']['logfile'])",True,self.server_list)
        self.client.set_log_level("INFO")
    
    def test_mpi4casa_log_level_default_to_debug(self):
        """Test changing globally log level from default to debug """
            
        # Change log level globally (test via MPIInterface as it internally uses MPICommandClient so both are tested)
        mpi_interface = MPIInterface()
        mpi_interface.set_log_level("DEBUG")    
                
        # Use a separate log file per server to facilitate analysis
        for server in self.server_list:
            logfile = 'test_mpi4casa_log_level_debug-server-%s.log' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s')" % (logfile),True,server)        
            
        # Run flagdata 
        flagdata(vis=self.vis, mode='summary')  
        
        # Iterate through the log files to see if we find command-handling messages
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/test_mpi4casa_log_level_debug-server-%s.log' % (cwd,str(server))
            content = open(logfile, 'r').read()
            if content.find('flagdata')>0: # Check only servers that processed a flagdata sub-job
                self.assertEqual(content.find("MPICommandServer")<0, True, "MPICommandServer msgs should be filtered out")                 
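
Redirecting each server's casalog to its own file, as setUp() and the test above do, is also a convenient debugging aid outside of unit tests. Here is a compact sketch of just that step (assumptions: 'client' and 'servers' as built in the previous sketch; the log file name is illustrative; flagdata stands in for any parallel task).

# Give every mpi4casa server its own log file before running a parallel task
for server in servers:
    client.push_command_request("casalog.setlogfile('debug-server-%s.log')" % server, True, server)

# ... run the parallel task, e.g. flagdata(vis='Four_ants_3C286.mms', mode='summary') ...

# Restore the default log file on every server afterwards
client.push_command_request("casalog.setlogfile(casa['files']['logfile'])", True, servers)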
Code example #3
    def test_singleton_behaviour(self):

        # Delete current MPICommandClient singleton instance reference
        client_ref = self.client
        del client_ref

        # Create a new MPICommandClient singleton instance reference
        new_client_ref = MPICommandClient()

        # Execute some command
        command_response_list = new_client_ref.push_command_request("a+b",True,[self.server_list[0]],{'a':1,'b':1})

        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
        self.assertEqual(command_response_list[0]['successful'], True, "Command execution was not successful")
        self.assertEqual(command_response_list[0]['traceback'], None, "Command execution trace-back should be None")
        self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
        self.assertEqual(command_response_list[0]['ret'], 2, "Command return variable should be 2")
Code example #4
class test_MPICommandClient(unittest.TestCase):
       
    def setUp(self):
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.server_list = MPIEnvironment.mpi_server_rank_list()
        self.client.start_services()
                            
    def test_exec_multiple_target_non_blocking_mode_str_params_successful(self):
        
        command_request_id_list = self.client.push_command_request("import time; time.sleep(3)",False,[self.server_list[0],self.server_list[1]])
        
        # Try to get the responses in non-blocking mode before they are ready
        command_response_list = self.client.get_command_response(command_request_id_list,False,True)
        
        # Get response in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 2, "Command response list should contain two elements")
        for command_response in command_response_list:
            self.assertEqual(command_response['successful'], True, "Command execution was not successful")
            self.assertEqual(command_response['traceback'], None, "Command execution trace-back should be None")
            self.assertEqual(command_response['status'], 'response received', "Command status should be 'response received'")
            self.assertEqual(command_response['ret'], None, "Command return variable should be None")
                    
    def test_eval_multiple_target_blocking_mode_str_params_successful(self):
        
        command_response_list = self.client.push_command_request("1+1",True,[self.server_list[0],self.server_list[1]])
        self.assertEqual(len(command_response_list), 2, "Command response list should contain two elements")
        for command_response in command_response_list:
            self.assertEqual(command_response['successful'], True, "Command execution was not successful")
            self.assertEqual(command_response['traceback'], None, "Command execution trace-back should be None")
            self.assertEqual(command_response['status'], 'response received', "Command status should be 'response received'")
            self.assertEqual(command_response['ret'], 2, "Command return variable should be 2")
                        
    def test_eval_undefined_target_non_blocking_mode_dict_params_not_successful(self):
        
        command_request_id_list = self.client.push_command_request("pow(a,b)",False,None,{'a':'test','b':2})
        
        # Try to get the responses in non-blocking mode before they are ready
        command_response_list = self.client.get_command_response(command_request_id_list,False,True)
        
        # Get response in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
        self.assertEqual(command_response_list[0]['successful'], False, "Command execution was successful")
        self.assertEqual(command_response_list[0]['traceback'].find("TypeError:")>=0, True, "Trace-back should contain TypeError")
        self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
        self.assertEqual(command_response_list[0]['ret'], None, "Command return variable should be None")
                    
    def test_singleton_behaviour(self):
        
        # Delete current MPICommandClient singleton instance reference
        client_ref = self.client
        del client_ref
        
        # Create a new MPICommandClient singleton instance reference
        new_client_ref = MPICommandClient()
        
        # Execute some command
        command_response_list = new_client_ref.push_command_request("a+b",True,[self.server_list[0]],{'a':1,'b':1})
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
        self.assertEqual(command_response_list[0]['successful'], True, "Command execution was not successful")
        self.assertEqual(command_response_list[0]['traceback'], None, "Command execution trace-back should be None")
        self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
        self.assertEqual(command_response_list[0]['ret'], 2, "Command return variable should be 2")       
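
The undefined-target test above also documents what a failed remote command looks like: 'successful' is False, 'ret' is None, and the exception text comes back in 'traceback'. A short sketch of checking for that case (assumptions: a client started as in setUp(); passing None as the target lets the framework choose a server, as in that test; the command is deliberately invalid for illustration).

# Detect and report a failed remote command
request_ids = client.push_command_request("pow('test', 2)", False, None)
responses = client.get_command_response(request_ids, True, True)
for response in responses:
    if not response['successful']:
        print('Remote command failed with: %s' % response['traceback'])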
Code example #5
class test_mpi4casa_NullSelection(unittest.TestCase):

    def setUp(self):
        
        self.vis = "Four_ants_3C286.mms"
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          
                
        self.client.push_command_request("import os",True,self.server_list)

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
        
        # Restore log file and filter
        self.client.push_command_request("casalog.setlogfile(casa['files']['logfile'])",True,self.server_list)        
    
    def test_mpi4casa_NullSelection_entire_mms(self):
        """Test filter out NullSelection exceptions"""
        
        # First clear the list of filtered-out messages and make sure that MSSelectionNullSelection shows up
        for server in self.server_list:
            logfile = 'MSSelectionNullSelection-Not-Filtered.log-server-%s' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s'); casalog.clearFilterMsgList()" % (logfile),True,server)
            
        # Run flagdata selecting a non-existing scan
        flagdata(vis=self.vis, scan='99')  
        
        # Iterate through the log files to see if we find the exception
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/MSSelectionNullSelection-Not-Filtered.log-server-%s' % (cwd,str(server))
            content = open(logfile, 'r').read()
            if content.find('flagdata')>0: # Check only servers that processed a flagdata sub-job
                self.assertEqual(content.find("MSSelectionNullSelection")>0, True, "MSSelectionNullSelection should not be filtered out")

        # Now populate the list of messages to be filtered out, including MSSelectionNullSelection
        text = ['MSSelectionNullSelection','NeverHappens']
        for server in self.server_list:
            logfile = 'MSSelectionNullSelection-Filtered.log-server-%s' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s'); casalog.filterMsg(%s)" % (logfile,str(text)),True,server) 
        
        # Run flagdata selecting a non-existing scan
        flagdata(vis=self.vis, scan='99')  
        
        # Iterate through the log files to see if we find the exception
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/MSSelectionNullSelection-Filtered.log-server-%s' % (cwd,str(server))
            content = open(logfile, 'r').read()
            if content.find('flagdata')>0: # Check only servers that processed a flagdata sub-job
                self.assertEqual(content.find("MSSelectionNullSelection")<0, True, "MSSelectionNullSelection should be filtered out")       
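
The filtering calls exercised above are plain casalog API and behave the same way in a serial session: casalog.filterMsg() suppresses log messages containing any of the given strings, and casalog.clearFilterMsgList() removes the filter again. A minimal non-MPI sketch (assumptions: an interactive CASA session where casalog and flagdata are defined; the filter strings and MS name are taken from the test above).

casalog.filterMsg(['MSSelectionNullSelection', 'NeverHappens'])  # hide matching messages
flagdata(vis='Four_ants_3C286.mms', scan='99')                   # the NullSelection warning is no longer logged
casalog.clearFilterMsgList()                                     # restore normal logging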
Code example #6
File: task_pieflag.py  Project: chrishales/pieflag
def pieflag(vis,
            field,          # data selection parameters
            refchanfile,
            fitorder_RR_LL,
            fitorder_RL_LR,
            scalethresh,
            SEFDfile,       # scalethresh parameter
            plotSEFD,
            dynamicflag,
            chunktime,      # dynamicflag parameters
            stdmax,
            maxoffset,
            staticflag,
            madmax,         # staticflag parameter
            binsamples,
            extendflag,
            boxtime,        # extendflag parameters
            boxthresh):

    #
    # Task pieflag
    #    Flags bad data by comparing with clean channels in bandpass-calibrated data.
    #
    #    Original reference: E. Middelberg, 2006, PASA, 23, 64
    #    Rewritten for use in CASA and updated to account for wideband
    #    and SEFD effects by Christopher A. Hales 2014.
    #
    #    Thanks to Kumar Golap, Justo Gonzalez, Jeff Kern, James Robnett,
    #    Urvashi Rau, Sanjay Bhatnagar, and of course Enno Middelberg
    #    for expert advice. Thanks to Emmanuel Momjian for providing
    #    Jansky VLA SEFD data for L and X bands (EVLA Memos 152 and 166)
    #    and to Bryan Butler for providing access to all other bands
    #    from the Jansky VLA Exposure Calculator.
    #
    #    Version 4.4 released 26 October 2016
    #    Tested with CASA 4.7.0 using Jansky VLA data
    #    Available at: http://github.com/chrishales/pieflag
    #
    #    Reference for this version:
    #    C. A. Hales, E. Middelberg, 2014, Astrophysics Source Code Library, 1408.014
    #    http://adsabs.harvard.edu/abs/2014ascl.soft08014H
    #
    
    startTime = time.time()
    casalog.origin('pieflag')
    casalog.post('--> pieflag version 4.4')
    
    if (not staticflag) and (not dynamicflag):
        casalog.post('*** ERROR: You need to select static or dynamic flagging.', 'ERROR')
        casalog.post('*** ERROR: Exiting pieflag.', 'ERROR')
        return
    
    ms.open(vis)
    vis=ms.name()
    ms.close()
    
    useMPI = MPIEnvironment.is_mpi_enabled
    if useMPI:
        if vis.lower().endswith('.ms'):
            useMPI=False
            casalog.post('--> MS will be processed in serial mode.')
        elif ph.axisType(vis) == 'baseline':
            # client is ID 0 and will not perform parallel processing, servers start from ID 1
            nthreads = MPIEnvironment.rank
            subms_path = vis+'/SUBMSS/'
            subms = filter(lambda x: os.path.isdir(os.path.join(subms_path, x)), os.listdir(subms_path))
            if len(subms) != nthreads:
                casalog.post('*** ERROR: Mismatch, MMS tailored for '+str(len(subms))+' engines but '+\
                                         'CASA session tailored for '+str(nthreads)+' engines.', 'ERROR')
                casalog.post('*** ERROR: Exiting pieflag.', 'ERROR')
                return
            
            server_list = MPIEnvironment.mpi_server_rank_list()
            casalog.post('--> Initializing MPI parallel cluster with '+str(nthreads)+' engines.')
            client = MPICommandClient()
            client.start_services()
            # do some detective work to find appropriate path to push to clients
            syspaths = sys.path
            n = 0
            for k in range(len(syspaths)):
                if os.path.isfile(syspaths[k]+'/mytasks.py'):
                    for line in open(syspaths[k]+'/mytasks.py','r'):
                        if re.search("task_location\['pieflag'\]",line):
                            if n==0:
                                n += 1
                                addpath = syspaths[k]
                            elif syspaths[k] != addpath:
                                n += 1
            
            if n == 1:
                casalog.filter('WARN')
                #client.set_log_level('WARN')
                client.push_command_request("casalog.filter('WARN')",True,server_list)
                client.push_command_request("sys.path.append('"+addpath+"')",True,server_list)
                client.push_command_request('from task_pieflag import pieflag_getflagstats',True,server_list)
                client.push_command_request('from task_pieflag import pieflag_flag',True,server_list)
                casalog.filter('INFO')
            else:
                if n == 0:
                    casalog.post('*** ERROR: pieflag mytasks.py installation not found in sys.path', 'ERROR')
                else:
                    casalog.post('*** ERROR: Ambiguity, sys.path contains more than 1 pieflag installation', 'ERROR')
                    casalog.post('***        (pieflag referenced in '+str(n)+' unique path/mytasks.py)', 'ERROR')
                
                casalog.post('*** ERROR: Exiting pieflag.', 'ERROR')
                return
            
            fcall1 = 'pieflag_getflagstats(vis,field,spw,npol,feedbasis)'
            fcall2 = 'pieflag_flag(vis,datacol,nthreads,field,vtbleLIST,inttime,nant,ddid,spw,refchan,nchan,npol,'+\
                     'feedbasis,fitorderLIST,sefdLIST,staticflag,madmax,binsamples,dynamicflag,chunktime,stdmax,'+\
                     'maxoffset,extendflag,boxtime,boxthresh)'
        else:
            casalog.post('*** ERROR: MMS is not partitioned by baseline. Cannot process.', 'ERROR')
            casalog.post('***        Use partition() to revert to MS then create baseline MMS.', 'ERROR')
            casalog.post('*** ERROR: Exiting pieflag.', 'ERROR')
            return
    else:
        if vis.lower().endswith('.mms'):
            casalog.post('*** ERROR: pieflag cannot handle MMS in non-MPI-enabled CASA session.', 'ERROR')
            casalog.post('*** ERROR: Exiting pieflag.', 'ERROR')
            return
        else:
            casalog.post('--> MS will be processed in serial mode.')
    
    tb.open(vis)
    if any('CORRECTED_DATA' in colnames for colnames in tb.colnames()):
        datacol='CORRECTED_DATA'
    else:
        datacol='DATA'
    
    tb.close()
    
    # load in reference channel details
    # OK, there are probably more elegant ways
    # of implementing the following code...meh
    refchandict=json.load(open(refchanfile))
    spw=[]
    for i in refchandict.keys():
        spw.append(int(i))
    
    nspw=len(spw)
    # json doesn't seem to load in the spw order properly
    # The user might not have entered spw's in order either
    # so perform sort just in case
    # note: no need to perform sort on the string versions
    spw.sort()
    # now get reference channels in corresponding sorted order
    refchan=[]
    for i in range(nspw):
        refchan.append(refchandict[str(spw[i])])
    
    # open MS and select relevant data
    ms.open(vis)
    ms.msselect({'field':str(field)})
    
    # get integration time
    scan_summary = ms.getscansummary()
    ms.close()
    scan_list = []
    for scan in scan_summary:
        if scan_summary[scan]['0']['FieldId'] == field:
            scan_list.append(int(scan))
    
    inttime=scan_summary[str(scan_list[0])]['0']['IntegrationTime']
    # get around potential floating point issues by rounding to nearest 1e-5 seconds
    if inttime != round(inttime,5):
        casalog.post('*** WARNING: It seems your integration time is specified to finer than 1e-5 seconds.','WARN')
        casalog.post('***          pieflag will assume this is a rounding error and carry on.','WARN')
    
    for i in range(len(scan_list)):
        if round(inttime,5) != round(scan_summary[str(scan_list[i])]['0']['IntegrationTime'],5):
            casalog.post('*** ERROR: Bummer, pieflag is not set up to handle '+\
                              'changing integration times throughout your MS.', 'ERROR')
            casalog.post('*** ERROR: Exiting pieflag.','ERROR')
            return
    
    # get number of baselines
    tb.open(vis+'/ANTENNA')
    atble=tb.getcol('NAME')
    tb.close()
    nant=atble.shape[0]
    nbaselines=nant*(nant-1)/2
    
    # channel to frequency (Hz) conversion
    tb.open(vis+'/SPECTRAL_WINDOW')
    vtble=tb.getcol('CHAN_FREQ')
    tb.close()
    # vtble format is vtble[channel][spw]
    # assume each spw has the same number of channels
    nchan=vtble.shape[0]
    
    # check that spw frequencies increase monotonically
    spwcheck=vtble[0,0]
    for s in range(1,len(vtble[0,:])):
        if vtble[0,s]<spwcheck:
            casalog.post("*** ERROR: Your spw's are not ordered with increasing frequency.",'ERROR')
            casalog.post('*** ERROR: Consider splitting your data and restarting pieflag. Exiting','ERROR')
            return
        
        spwcheck=vtble[0,s]
    
    # get number of polarizations, assume they don't change throughout observation
    # get details from the first user-selected spw within the first scan on target field
    # note: I won't assume that spw specifies data_desc_id in the main table, even
    #       though in most cases it probably does. Probably overkill given the lack
    #       of checks done elsewhere in this code...
    tb.open(vis+'/DATA_DESCRIPTION')
    temptb=tb.query('SPECTRAL_WINDOW_ID='+str(spw[0]))
    # while here, get the data_desc_id values that pair with spw number
    tempddid=tb.getcol('SPECTRAL_WINDOW_ID').tolist()
    ddid=[]
    for s in range(nspw):
        ddid.append(tempddid.index(spw[s]))
    
    tb.close()
    polid=temptb.getcell('POLARIZATION_ID')
    tb.open(vis+'/POLARIZATION')
    npol=tb.getcell('NUM_CORR',polid)
    poltype=tb.getcell('CORR_TYPE',polid)
    tb.close()
    
    if not (npol == 2 or npol == 4):
        casalog.post('*** ERROR: Your data contains '+str(npol)+' polarization products.','ERROR')
        casalog.post('*** ERROR: pieflag can only handle 2 (eg RR/LL) or 4 (eg RR/RL/LR/LL). Exiting.','ERROR')
        return
    
    # see stokes.h for details
    if poltype[0] == 5:
        # circular
        feedbasis = 1
    elif poltype[0] == 9:
        #linear
        feedbasis = 0
    else:
        casalog.post('*** ERROR: Your data uses an unsupported feed basis. Exiting','ERROR')
        return
    
    casalog.post('--> Some details about your data:')
    casalog.post('    data column to process = '+datacol)
    casalog.post('    integration time = '+str(inttime)+' sec')
    casalog.post('    number of baselines = '+str(nbaselines))
    casalog.post('    spectral windows to process = '+str(spw))
    casalog.post('    number of channels per spectral window = '+str(nchan))
    if feedbasis:
        casalog.post('    feed basis = circular')
    else:
        casalog.post('    feed basis = linear')
    
    casalog.post('    number of polarization products to process = '+str(npol))
    casalog.post('--> Statistics of pre-existing flags:')
    flag0 = np.zeros((nspw,2*npol+2))
    for i in range(nspw):
        casalog.filter('WARN')
        if useMPI:
            for k in range(nthreads):
                param = {'vis':vis+'/SUBMSS/'+subms[k],'field':field,\
                         'spw':spw[i],'npol':npol,'feedbasis':feedbasis}
                if k == 0:
                    pid = client.push_command_request(fcall1,False,None,param)
                else:
                    pid.append((client.push_command_request(fcall1,False,None,param))[0])
            
            presults = client.get_command_response(pid,True)
            for k in range(nthreads):
                flag0[i] += presults[k]['ret']
            
        else:
            flag0[i] = pieflag_getflagstats(vis,field,spw[i],npol,feedbasis)
        
        casalog.filter('INFO')
        RRs="{:.1f}".format(flag0[i][0]/flag0[i][1]*100.)
        LLs="{:.1f}".format(flag0[i][2]/flag0[i][3]*100.)
        TOTs="{:.1f}".format(flag0[i][4]/flag0[i][5]*100.)
        if npol == 2:
            if feedbasis:
                outstr='    flagged data in spw='+str(spw[i])+':  RR='+RRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    flagged data in spw='+str(spw[i])+':  XX='+RRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        else:
            RLs="{:.1f}".format(flag0[i][6]/flag0[i][7]*100.)
            LRs="{:.1f}".format(flag0[i][8]/flag0[i][9]*100.)
            if feedbasis:
                outstr='    flagged data in spw='+str(spw[i])+':  RR='+RRs+'%  RL='+RLs+'%  LR='+LRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    flagged data in spw='+str(spw[i])+':  XX='+RRs+'%  XY='+RLs+'%  YX='+LRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        
        casalog.post(outstr)
    
    # Check there are enough spectral windows to perform the fitting later on. If not, lower the order.
    if fitorder_RR_LL > nspw-1:
        if fitorder_RR_LL == 2:
            if feedbasis:
                casalog.post('*** WARNING: pieflag needs at least 3 spectral windows to fit for RR or LL spectral curvature.','WARN')
            else:
                casalog.post('*** WARNING: pieflag needs at least 3 spectral windows to fit for XX or YY spectral curvature.','WARN')
        else:
            if feedbasis:
                casalog.post('*** WARNING: pieflag needs at least 2 spectral windows to fit for RR or LL spectral index.','WARN')
            else:
                casalog.post('*** WARNING: pieflag needs at least 2 spectral windows to fit for XX or YY spectral index.','WARN')
        
        if nspw == 2:
            fitorder_RR_LL=1
        else:
            fitorder_RR_LL=0
        
        casalog.post('*** WARNING: fitorder_RR_LL has been reduced to '+str(int(fitorder_RR_LL))+ ' and','WARN')
        casalog.post('***          may be reduced further for some baselines if the','WARN')
        casalog.post('***          reference channel isn\'t available in all selected spw\'s.','WARN')
    
    if npol == 2:
        fitorder    = np.zeros(2)
        fitorder[0] = fitorder_RR_LL
        fitorder[1] = fitorder_RR_LL
    elif npol == 4:
        if fitorder_RL_LR > nspw-1:
            if fitorder_RL_LR == 2:
                casalog.post('*** WARNING: pieflag needs at least 3 spectral windows to fit for RL or LR spectral curvature.','WARN')
            else:
                casalog.post('*** WARNING: pieflag needs at least 2 spectral windows to fit for RL or LR spectral index.','WARN')
            
            if nspw == 2:
                fitorder_RL_LR=1
            else:
                fitorder_RL_LR=0
            
            casalog.post('*** WARNING: fitorder_RL_LR has been reduced to '+str(int(fitorder_RL_LR))+' and','WARN')
            casalog.post('***          may be reduced further for some baselines if the','WARN')
            casalog.post('***          reference channel isn\'t available in all selected spw\'s.','WARN')
        
        fitorder    = np.zeros(4)
        fitorder[0] = fitorder_RR_LL
        fitorder[1] = fitorder_RL_LR
        fitorder[2] = fitorder_RL_LR
        fitorder[3] = fitorder_RR_LL
    
    if scalethresh:
        # read in SEFD data and interpolate to get values at our channel frequencies
        casalog.post('--> Reading in SEFD and interpolating at channel frequencies...')
        sefdRAW=np.loadtxt(SEFDfile)
        sefd=np.zeros((nspw,nchan))
        if not np.all(np.diff(sefdRAW[:,0]) >= 0):
            casalog.post('*** ERROR: Your SEFD file must be in order of increasing frequency.','ERROR')
            casalog.post('*** ERROR: Exiting pieflag.','ERROR')
            return
        
        for i in range(nspw):
            if (vtble[:,spw[i]].min() < sefdRAW[:,0].min()) or (vtble[:,spw[i]].max() > sefdRAW[:,0].max()):
                casalog.post('*** ERROR: pieflag cannot extrapolate your SEFD.','ERROR')
                casalog.post('*** ERROR: Provide new SEFD covering your entire frequency range.','ERROR')
                casalog.post('*** ERROR: Exiting pieflag.','ERROR')
                return
        
        sefdINTERP = interp1d(sefdRAW[:,0],sefdRAW[:,1])
        for i in range(nspw):
            sefdREFCHAN = sefdINTERP(vtble[refchan[i]][spw[i]])
            for j in range(nchan):
                # values in each spectral window will be relative to the reference channel value
                sefd[i][j] = sefdINTERP(vtble[j][spw[i]]) / sefdREFCHAN
        
        if plotSEFD:
            # clunky, but works, meh...
            sefdPLOT=np.zeros((nspw*nchan,3))
            k=0
            for i in range(nspw):
                sefdREFCHAN = sefdINTERP(vtble[refchan[i]][spw[i]])
                for j in range(nchan):
                    sefdPLOT[k][0] = vtble[j][spw[i]]/1.0e9
                    sefdPLOT[k][1] = sefd[i][j] * sefdREFCHAN
                    sefdPLOT[k][2] = sefd[i][j]
                    k += 1
            
            f, (ax1, ax2) = plt.subplots(2,sharex=True)
            ax1.plot(sefdRAW[:,0]/1.0e9,sefdRAW[:,1],'b-',sefdPLOT[:,0],sefdPLOT[:,1],'r.',markersize=10)
            ax2.plot([sefdRAW[0,0]/1.0e9,sefdRAW[len(sefdRAW[:,0])-1,0]/1.0e9],[1.,1.],'c-',sefdPLOT[:,0],sefdPLOT[:,2],'r.',markersize=10)
            f.subplots_adjust(hspace=0)
            plt.setp([a.get_xticklabels() for a in f.axes[:-1]], visible=False)
            ax1.set_title('relative sensitivity assumed across your band,\nnormalized to the reference channel in each spw')
            ax1.legend(['raw input','interpolated'])
            ax1.set_ylabel('SEFD (arbitrary units)')
            ax2.set_xlabel('frequency (GHz)')
            ax2.set_ylabel('SEFD (normalized units per spw)')
    else:
        sefd=np.ones((nspw,nchan))
    
    if not staticflag:
        madmax = 0
        binsamples = 0
    
    if not dynamicflag:
        chunktime = 0
        stdmax = 0
        maxoffset = 0
    
    if not extendflag:
        boxtime = 0
        boxthresh = 0
    
    # forcibly remove all lock files
    #os.system('find '+vis+' -name "*lock" -print | xargs rm')
    
    if useMPI:
        casalog.post('--> pieflag will now flag your data using '+str(nthreads)+' parallel threads.')
        casalog.filter('WARN')
        for k in range(nthreads):
            param = {'vis':vis+'/SUBMSS/'+subms[k],'datacol':datacol,'nthreads':nthreads,'field':field,
                     'vtbleLIST':vtble.tolist(),'inttime':inttime,'nant':nant,
                     'ddid':ddid,'spw':spw,'refchan':refchan,'nchan':nchan,'npol':npol,'feedbasis':feedbasis,
                     'fitorderLIST':fitorder.tolist(),'sefdLIST':sefd.tolist(),
                     'staticflag':staticflag,'madmax':madmax,'binsamples':binsamples,
                     'dynamicflag':dynamicflag,'chunktime':chunktime,'stdmax':stdmax,'maxoffset':maxoffset,
                     'extendflag':extendflag,'boxtime':boxtime,'boxthresh':boxthresh}
            if k == 0:
                pid = client.push_command_request(fcall2,False,None,param)
            else:
                pid.append((client.push_command_request(fcall2,False,None,param))[0])
        
        presults = client.get_command_response(pid,True)
        casalog.filter('INFO')
    else:
        casalog.post('--> pieflag will now flag your data in serial mode.')
        pieflag_flag(vis,datacol,1,field,
                     vtble.tolist(),inttime,nant,
                     ddid,spw,refchan,nchan,npol,feedbasis,
                     fitorder.tolist(),sefd.tolist(),
                     staticflag,madmax,binsamples,
                     dynamicflag,chunktime,stdmax,maxoffset,
                     extendflag,boxtime,boxthresh)
    
    # show updated flagging statistics
    casalog.post('--> Statistics of final flags (including pre-existing):')
    flag1 = np.zeros((nspw,2*npol+2))
    for i in range(nspw):
        casalog.filter('WARN')
        if useMPI:
            for k in range(nthreads):
                param = {'vis':vis+'/SUBMSS/'+subms[k],'field':field,\
                         'spw':spw[i],'npol':npol,'feedbasis':feedbasis}
                if k == 0:
                    pid = client.push_command_request(fcall1,False,None,param)
                else:
                    pid.append((client.push_command_request(fcall1,False,None,param))[0])
            
            presults = client.get_command_response(pid,True)
            for k in range(nthreads):
                flag1[i] += presults[k]['ret']
            
        else:
            flag1[i] = pieflag_getflagstats(vis,field,spw[i],npol,feedbasis)
        
        casalog.filter('INFO')
        RRs="{:.1f}".format(flag1[i][0]/flag1[i][1]*100.)
        LLs="{:.1f}".format(flag1[i][2]/flag1[i][3]*100.)
        TOTs="{:.1f}".format(flag1[i][4]/flag1[i][5]*100.)
        if npol == 2:
            if feedbasis:
                outstr='    flagged data in spw='+str(spw[i])+':  RR='+RRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    flagged data in spw='+str(spw[i])+':  XX='+RRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        else:
            RLs="{:.1f}".format(flag1[i][6]/flag1[i][7]*100.)
            LRs="{:.1f}".format(flag1[i][8]/flag1[i][9]*100.)
            if feedbasis:
                outstr='    flagged data in spw='+str(spw[i])+':  RR='+RRs+'%  RL='+RLs+'%  LR='+LRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    flagged data in spw='+str(spw[i])+':  XX='+RRs+'%  XY='+RLs+'%  YX='+LRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        
        casalog.post(outstr)
    
    casalog.post('--> Statistics of pieflag flags (excluding pre-existing):')
    for i in range(nspw):
        RRs="{:.1f}".format((flag1[i][0]-flag0[i][0])/flag0[i][1]*100.)
        LLs="{:.1f}".format((flag1[i][2]-flag0[i][2])/flag0[i][3]*100.)
        TOTs="{:.1f}".format((flag1[i][4]-flag0[i][4])/flag0[i][5]*100.)
        if npol == 2:
            if feedbasis:
                outstr='    data flagged in spw='+str(spw[i])+':  RR='+RRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    data flagged in spw='+str(spw[i])+':  XX='+RRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        else:
            RLs="{:.1f}".format((flag1[i][6]-flag0[i][6])/flag0[i][7]*100.)
            LRs="{:.1f}".format((flag1[i][8]-flag0[i][8])/flag0[i][9]*100.)
            if feedbasis:
                outstr='    data flagged in spw='+str(spw[i])+':  RR='+RRs+'%  RL='+RLs+'%  LR='+LRs+'%  LL='+LLs+'%  total='+TOTs+'%'
            else:
                outstr='    data flagged in spw='+str(spw[i])+':  XX='+RRs+'%  XY='+RLs+'%  YX='+LRs+'%  YY='+LLs+'%  total='+TOTs+'%'
        
        casalog.post(outstr)
    
    # forcibly remove all lock files
    #os.system('find '+vis+' -name "*lock" -print | xargs rm')
    
    if useMPI:
        #client.set_log_level('INFO')
        client.push_command_request("casalog.filter('INFO')",True,server_list)
    
    t=time.time()-startTime
    casalog.post('--> pieflag run time:  '+str(int(t//3600))+' hours  '+\
                 str(int(t%3600//60))+' minutes  '+str(int(t%60))+' seconds')
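
The MPI branch of the task above reduces to one pattern: push the same call string to every engine with a per-sub-MS parameter dictionary, then gather and sum the per-engine results. A stripped-down sketch of that pattern (assumptions: 'client', 'subms', 'nthreads', 'vis', 'field', 'spw', 'npol' and 'feedbasis' are set up exactly as in the task body; fcall1 is the same call string the task builds; only the first spw is queried here for brevity).

fcall1 = 'pieflag_getflagstats(vis,field,spw,npol,feedbasis)'

# Queue one request per sub-MS engine, each with its own parameter dictionary
pid = []
for k in range(nthreads):
    param = {'vis': vis + '/SUBMSS/' + subms[k], 'field': field,
             'spw': spw[0], 'npol': npol, 'feedbasis': feedbasis}
    pid.append(client.push_command_request(fcall1, False, None, param)[0])

# One blocking call collects every engine's result; the per-engine flag
# statistics are then summed into a single array, as the task does above
results = client.get_command_response(pid, True)
flagstats = sum(r['ret'] for r in results)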