Code example #1
class test_mpi4casa_log_level(unittest.TestCase):

    def setUp(self):
        
        self.vis = "Four_ants_3C286.mms"
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          
                
        self.client.push_command_request("import os",True,self.server_list)

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
        
        # Restore log file and level
        self.client.push_command_request("casalog.setlogfile(casa['files']['logfile'])",True,self.server_list)
        self.client.set_log_level("INFO")
    
    def test_mpi4casa_log_level_default_to_debug(self):
        """Test changing the log level globally from the default to DEBUG"""
            
        # Change the log level globally (tested via MPIInterface, which internally uses MPICommandClient, so both are exercised)
        mpi_interface = MPIInterface()
        mpi_interface.set_log_level("DEBUG")    
                
        # Use a separate log file per server to facilitate analysis
        for server in self.server_list:
            logfile = 'test_mpi4casa_log_level_debug-server-%s.log' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s')" % (logfile),True,server)        
            
        # Run flagdata 
        flagdata(vis=self.vis, mode='summary')  
        
        # Iterate through the log files to check for command-handling messages
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/test_mpi4casa_log_level_debug-server-%s.log' % (cwd,str(server))
            with open(logfile, 'r') as f:
                content = f.read()
            if content.find('flagdata') > 0: # Check only servers that processed a flagdata sub-job
                self.assertTrue(content.find("MPICommandServer") < 0, "MPICommandServer msgs should be filtered out")
Code example #2
class test_mpi4casa_plotms(unittest.TestCase):

    def setUp(self):
        
        self.vis = 'Four_ants_3C286.mms'
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
    
    def test_mpi4casa_plotms_concurrent(self):
        """Run plotms on the same MS from each server simulateneously"""
        
        # Change current working directory
        self.client.push_command_request("os.chdir('%s')" % os.getcwd(),True,self.server_list)
        
        # Farm plotms jobs
        command_request_id_list = []
        for server in self.server_list:
            plotfile = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server)
            cmd = "plotms('%s', avgchannel='8',avgtime='60',plotfile='%s',showgui=False)" % (self.vis,plotfile)
            command_request_id = self.client.push_command_request(cmd,False,server)
            command_request_id_list.append(command_request_id[0])
            
        # Get responses in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Compare files
        for server_idx in range(len(self.server_list)):
            for server_idy in range(server_idx+1, len(self.server_list)):
                server_x = self.server_list[server_idx]
                server_y = self.server_list[server_idy]
                plotfile_server_idx = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server_x)
                plotfile_server_idy = 'test_mpi4casa_plotms_concurrent-%s.png' % str(server_y)
                areEqual = filecmp.cmp(plotfile_server_idx,plotfile_server_idy)
                self.assertTrue(areEqual,"Plotfile generated by server %s is different from plotfile generated by server %s" 
                                % (str(server_x),str(server_y)))
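The nested index loop above compares every pair of plot files exactly once. The same pairwise check can be expressed more directly with itertools.combinations from the standard library; a stand-alone sketch (the helper name all_plots_identical is hypothetical):

import filecmp
import itertools

def all_plots_identical(server_list, pattern='test_mpi4casa_plotms_concurrent-%s.png'):
    """Return True if every pair of per-server plot files compares equal."""
    # combinations() yields each unordered pair exactly once, mirroring
    # the (server_idx, server_idy > server_idx) double loop in the test.
    for server_x, server_y in itertools.combinations(server_list, 2):
        if not filecmp.cmp(pattern % server_x, pattern % server_y):
            return False
    return True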
Code example #3
class test_MPICommandServer(unittest.TestCase):            
            
    def setUp(self):
        
        self.client = MPICommandClient()
        self.client.set_log_mode('unified')
        self.server_list = MPIEnvironment.mpi_server_rank_list()
        self.client.start_services()
                        
    def test_server_cannot_be_instantiated(self):
        
        instantiated = False
        try:
            server = MPICommandServer()
            instantiated = True
        except Exception:
            instantiated = False
            
        self.assertEqual(instantiated, False, "It should not be possible to instantiate MPICommandServer in the client")
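The test only asserts that construction fails on the client; it does not depend on how MPICommandServer enforces that. A hypothetical guard of the kind being exercised could look like the sketch below (RankGuardedServer and is_client_rank are illustrative names, not mpi4casa's actual implementation):

class RankGuardedServer(object):

    def __init__(self, is_client_rank):
        # Refuse construction on the client-side MPI rank, which is the
        # behaviour the test above asserts for MPICommandServer.
        if is_client_rank:
            raise Exception("Server cannot be instantiated in the client")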
Code example #4
class test_MPICommandClient(unittest.TestCase):
       
    def setUp(self):
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.server_list = MPIEnvironment.mpi_server_rank_list()
        self.client.start_services()
                            
    def test_exec_multiple_target_non_blocking_mode_str_params_successful(self):
        
        command_request_id_list = self.client.push_command_request("import time; time.sleep(3)",False,[self.server_list[0],self.server_list[1]])
        
        # Try to get the responses early, in non-blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,False,True)
        
        # Get response in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 2, "Command response list should contain two elements")
        for command_response in command_response_list:
            self.assertEqual(command_response['successful'], True, "Command execution was not successful")
            self.assertEqual(command_response['traceback'], None, "Command execution trace-back should be None")
            self.assertEqual(command_response['status'], 'response received', "Command status should be 'response received'")
            self.assertEqual(command_response['ret'], None, "Command return variable should be None")
                    
    def test_eval_multiple_target_blocking_mode_str_params_successful(self):
        
        command_response_list = self.client.push_command_request("1+1",True,[self.server_list[0],self.server_list[1]])
        self.assertEqual(len(command_response_list), 2, "Command response list should contain two elements")
        for command_response in command_response_list:
            self.assertEqual(command_response['successful'], True, "Command execution was not successful")
            self.assertEqual(command_response['traceback'], None, "Command execution trace-back should be None")
            self.assertEqual(command_response['status'], 'response received', "Command status should be 'response received'")
            self.assertEqual(command_response['ret'], 2, "Command return variable should be 2")
                        
    def test_eval_undefined_target_non_blocking_mode_dict_params_not_successful(self):
        
        command_request_id_list = self.client.push_command_request("pow(a,b)",False,None,{'a':'test','b':2})
        
        # Try to get the responses early, in non-blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,False,True)
        
        # Get response in blocking mode
        command_response_list = self.client.get_command_response(command_request_id_list,True,True)
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
        self.assertEqual(command_response_list[0]['successful'], False, "Command execution was successful")
        self.assertEqual(command_response_list[0]['traceback'].find("TypeError:")>=0, True, "Trace-back should contain TypeError")
        self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
        self.assertEqual(command_response_list[0]['ret'], None, "Command return variable should be None")
                    
    def test_singleton_behaviour(self):
        
        # Drop a local reference to the MPICommandClient singleton
        # (the singleton itself must survive and be returned again below)
        client_ref = self.client
        del client_ref
        
        # Create a new MPICommandClient singleton instance reference
        new_client_ref = MPICommandClient()
        
        # Execute some command
        command_response_list = new_client_ref.push_command_request("a+b",True,[self.server_list[0]],{'a':1,'b':1})
        
        # Analyze command response list contents
        self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
        self.assertEqual(command_response_list[0]['successful'], True, "Command execution was not successful")
        self.assertEqual(command_response_list[0]['traceback'], None, "Command execution trace-back should be None")
        self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
        self.assertEqual(command_response_list[0]['ret'], 2, "Command return variable should be 2")       
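Taken together, these tests exercise the two usage patterns of the command interface: a blocking push, which returns the response list directly, and a non-blocking push followed by get_command_response. A minimal sketch of the non-blocking pattern, assuming the signatures inferred from the tests above — push_command_request(command, block, target_server, parameters) and get_command_response(ids, block, verbose) — and the mpi4casa import path:

from mpi4casa.MPICommandClient import MPICommandClient

client = MPICommandClient()
client.set_log_mode('redirect')
client.start_services()

# Non-blocking push: returns request ids immediately instead of responses.
ids = client.push_command_request("a*b", False, None, {'a': 6, 'b': 7})

# Blocking retrieval: waits until all responses have arrived.
for response in client.get_command_response(ids, True, True):
    assert response['successful'] and response['ret'] == 42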
Code example #5
class test_mpi4casa_NullSelection(unittest.TestCase):

    def setUp(self):
        
        self.vis = "Four_ants_3C286.mms"
        setUpFile(self.vis,'vis')
        
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()       
        
        # Prepare list of servers
        self.server_list = []
        server_list = self.client.get_server_status()
        for server in server_list:
            if not server_list[server]['timeout']:
                self.server_list.append(server_list[server]['rank'])          
                
        self.client.push_command_request("import os",True,self.server_list)

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
        
        # Restore log file and filter
        self.client.push_command_request("casalog.setlogfile(casa['files']['logfile'])",True,self.server_list)        
    
    def test_mpi4casa_NullSelection_entire_mms(self):
        """Test filter out NullSelection exceptions"""
        
        # First clear the list of filtered-out messages and make sure that MSSelectionNullSelection shows up
        for server in self.server_list:
            logfile = 'MSSelectionNullSelection-Not-Filtered.log-server-%s' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s'); casalog.clearFilterMsgList()" % (logfile),True,server)
            
        # Run flagdata selecting a non-existing scan
        flagdata(vis=self.vis, scan='99')  
        
        # Iterate through the log files to check for the exception
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/MSSelectionNullSelection-Not-Filtered.log-server-%s' % (cwd,str(server))
            with open(logfile, 'r') as f:
                content = f.read()
            if content.find('flagdata') > 0: # Check only servers that processed a flagdata sub-job
                self.assertTrue(content.find("MSSelectionNullSelection") > 0, "MSSelectionNullSelection should not be filtered out")

        # Now populate the list of messages to be filtered out, including MSSelectionNullSelection
        text = ['MSSelectionNullSelection','NeverHappens']
        for server in self.server_list:
            logfile = 'MSSelectionNullSelection-Filtered.log-server-%s' % str(server)
            self.client.push_command_request("casalog.setlogfile('%s'); casalog.filterMsg(%s)" % (logfile,str(text)),True,server) 
        
        # Run flagdata selecting a non-existing scan
        flagdata(vis=self.vis, scan='99')  
        
        # Iterate through the log files to check for the exception
        for server in self.server_list:
            # Get current working directory (we might be in the 'nosedir' subdirectory)
            cwd = self.client.push_command_request("os.getcwd()",True,server)[0]['ret']
            logfile = '%s/MSSelectionNullSelection-Filtered.log-server-%s' % (cwd,str(server))
            with open(logfile, 'r') as f:
                content = f.read()
            if content.find('flagdata') > 0: # Check only servers that processed a flagdata sub-job
                self.assertTrue(content.find("MSSelectionNullSelection") < 0, "MSSelectionNullSelection should be filtered out")
Code example #6
class test_mpi4casa_applycal(unittest.TestCase):

    def setUp(self):
        
        # Set-up MMS
        self.vis = "ngc5921.applycal.mms"
        self.vis_sorted = "ngc5921.applycal.sorted.mms"
        setUpFile(self.vis,'vis')
        
        # Set-up reference MMS
        self.ref = "ngc5921.applycal.ms"
        self.ref_sorted = "ngc5921.applycal.sorted.ms"
        setUpFile(self.ref,'ref')
        
        # Set-up auxiliary files
        self.aux = ["ngc5921.fluxscale", "ngc5921.gcal", "ngc5921.bcal"]
        setUpFile(self.aux,'aux')
        
        # Repository caltables are pre-v4.1, and we
        # must update them _before_ applycal to avoid contention
        casalog.post("Updating pre-v4.1 caltables: %s" % str(self.aux),"WARN","test1_applycal_fluxscale_gcal_bcal")
        cblocal = cbtool()
        for oldct in self.aux:
            cblocal.updatecaltable(oldct)
        casalog.post("Pre-v4.1 caltables updated","INFO","test_mpi4casa_applycal")        
        
        # Tmp files
        self.vis2 = self.vis + '.2'
        self.vis3 = self.vis + '.3'
        self.vis_sorted2 = self.vis_sorted + '.2'
        self.vis_sorted3 = self.vis_sorted + '.3'
        
        # Tmp aux files
        self.aux2 = []
        self.aux3 = []
        for f in self.aux:
            self.aux2.append(f + '.2')
            self.aux3.append(f + '.3')
        
        # Set up cluster
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()          

    def tearDown(self):
        
        # Remove MMS
        os.system('rm -rf ' + self.vis) 
        os.system('rm -rf ' + self.vis_sorted) 
        
        # Remove ref MMS
        os.system('rm -rf ' + self.ref) 
        os.system('rm -rf ' + self.ref_sorted) 
        
        # Remove tmp files
        os.system('rm -rf ' + self.vis2)
        os.system('rm -rf ' + self.vis3)
        os.system('rm -rf ' + self.vis_sorted2)
        os.system('rm -rf ' + self.vis_sorted3)        
        
        # Remove aux files
        for f in self.aux: os.system('rm -rf ' + f)
            
        # Remove tmp aux files
        for f in self.aux2: os.system('rm -rf ' + f)
        for f in self.aux3: os.system('rm -rf ' + f)
                     
        
    def test1_applycal_fluxscale_gcal_bcal(self):
        """Test 1: Apply calibration using fluxscal gcal and bcal tables"""
        
        # Run applycal in MS mode
        applycal(vis=self.ref,field='',spw='',selectdata=False,gaintable=self.aux,
                 gainfield=['nearest','nearest','0'],
                 interp=['linear', 'linear','nearest'],spwmap=[])
        
        # Run applycal in MMS mode
        applycal(vis=self.vis,field='',spw='',selectdata=False,gaintable=self.aux,
                 gainfield=['nearest','nearest','0'],
                 interp=['linear', 'linear','nearest'],spwmap=[])
        
        # Sort file to properly match rows for comparison
        casalog.post("Sorting vis file: %s" % str(self.vis),"INFO","test1_applycal_fluxscale_gcal_bcal")
        sortFile(self.vis,self.vis_sorted)  
        casalog.post("Sorting ref file: %s" % str(self.ref),"INFO","test1_applycal_fluxscale_gcal_bcal")    
        sortFile(self.ref,self.ref_sorted)        
        
        # Compare files
        compare = testhelper.compTables(self.ref_sorted,self.vis_sorted,['FLAG_CATEGORY'])
        self.assertTrue(compare)      
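The MS-vs-MMS validation pattern of this test is reusable: run the same task against a reference MS and the MMS under test, sort both so the rows line up, then diff the tables while excluding columns that legitimately differ. A condensed sketch, assuming the sortFile and testhelper helpers that accompany these tests (the file and caltable names are placeholders):

caltables = ['flux.cal', 'gain.cal', 'bandpass.cal']  # placeholder names

# Same applycal call against the reference MS and the MMS under test.
applycal(vis='ref.ms', gaintable=caltables, selectdata=False)
applycal(vis='test.mms', gaintable=caltables, selectdata=False)

# Sort both so rows can be matched one-to-one, then compare tables,
# ignoring FLAG_CATEGORY as in the test above.
sortFile('ref.ms', 'ref.sorted.ms')
sortFile('test.mms', 'test.sorted.mms')
assert testhelper.compTables('ref.sorted.ms', 'test.sorted.mms', ['FLAG_CATEGORY'])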
Code example #7
class test_mpi4casa_flagdata(unittest.TestCase):

    def setUp(self):
        
        self.vis = "Four_ants_3C286.mms"
        setUpFile(self.vis,'vis')
        
        # Tmp files
        self.vis2 = self.vis + '.2'
        self.vis3 = self.vis + '.3'    
        
        # Set up cluster
        self.client = MPICommandClient()
        self.client.set_log_mode('redirect')
        self.client.start_services()  

    def tearDown(self):

        os.system('rm -rf ' + self.vis)
        
        # Remove tmp files
        os.system('rm -rf ' + self.vis2)
        os.system('rm -rf ' + self.vis3)        
    
    def test_mpi4casa_flagdata_list_return(self):
        """Test support for MMS using flagdata in unflag+clip mode"""

        # Create list file
        text = "mode='unflag'\n"\
               "mode='clip' clipminmax=[0,0.1]"
        filename = 'list_flagdata.txt'
        create_input(text, filename)

        # step 1: Do unflag+clip
        flagdata(vis=self.vis, mode='list', inpfile=filename)

        # step 2: Now do summary
        ret_dict = flagdata(vis=self.vis, mode='summary')

        # Check summary
        self.assertTrue(ret_dict['name']=='Summary')
        self.assertTrue(ret_dict['spw']['15']['flagged'] == 96284.0)
        self.assertTrue(ret_dict['spw']['0']['flagged'] == 129711.0)
        self.assertTrue(ret_dict['spw']['1']['flagged'] == 128551.0)
        self.assertTrue(ret_dict['spw']['2']['flagged'] == 125686.0)
        self.assertTrue(ret_dict['spw']['3']['flagged'] == 122862.0)
        self.assertTrue(ret_dict['spw']['4']['flagged'] == 109317.0)
        self.assertTrue(ret_dict['spw']['5']['flagged'] == 24481.0)
        self.assertTrue(ret_dict['spw']['6']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['7']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['8']['flagged'] == 0)
        self.assertTrue(ret_dict['spw']['9']['flagged'] == 27422.0)
        self.assertTrue(ret_dict['spw']['10']['flagged'] == 124638.0)
        self.assertTrue(ret_dict['spw']['11']['flagged'] == 137813.0)
        self.assertTrue(ret_dict['spw']['12']['flagged'] == 131896.0)
        self.assertTrue(ret_dict['spw']['13']['flagged'] == 125074.0)
        self.assertTrue(ret_dict['spw']['14']['flagged'] == 118039.0)
        
    def test_mpi4casa_flagdata_list_return_async(self):
        """Test flagdata summary in async mode"""
        
        # First run flagdata sequentially
        bypassParallelProcessing = ParallelTaskHelper.getBypassParallelProcessing()
        ParallelTaskHelper.bypassParallelProcessing(2)
        res = flagdata(vis=self.vis, mode='summary')
        ParallelTaskHelper.bypassParallelProcessing(bypassParallelProcessing)
        
        # Make a copy of the input MMS for each flagdata instance
        os.system("cp -r %s %s" % (self.vis,self.vis2))
        os.system("cp -r %s %s" % (self.vis,self.vis3))
        
        # Set async mode in ParallelTaskHelper
        ParallelTaskHelper.setAsyncMode(True)
        
        # Run flagdata in MMS mode on the first MMS
        request_id_1 = flagdata(vis=self.vis, mode='summary')    
        
        # Run flagdata in MMS mode on the second MMS
        request_id_2 = flagdata(vis=self.vis2, mode='summary')
        
        # Run flagdata in MMS mode on the third MMS
        request_id_3 = flagdata(vis=self.vis3, mode='summary')
        
        # Get responses in blocking mode
        request_id_list = request_id_1 + request_id_2 + request_id_3
        command_response_list = self.client.get_command_response(request_id_list,True,True)
        
        # Get result
        res1 = ParallelTaskHelper.getResult(request_id_1,'flagdata')
        res2 = ParallelTaskHelper.getResult(request_id_2,'flagdata')
        res3 = ParallelTaskHelper.getResult(request_id_3,'flagdata')   
        
        # Unset async mode in ParallelTaskHelper
        ParallelTaskHelper.setAsyncMode(False)         
        
        self.assertEqual(res1,res, "flagdata dictionary does not match for the first flagdata run")
        self.assertEqual(res2,res, "flagdata dictionary does not match for the second flagdata run")
        self.assertEqual(res3,res, "flagdata dictionary does not match for the third flagdata run")       
        
        
    def test_mpi4casa_flagdata_list_return_multithreading(self):
        """Test flagdata summary in multithreading mode"""
        
        # First run flagdata sequentially
        bypassParallelProcessing = ParallelTaskHelper.getBypassParallelProcessing()
        ParallelTaskHelper.bypassParallelProcessing(2)
        res = flagdata(vis=self.vis, mode='summary')
        ParallelTaskHelper.bypassParallelProcessing(bypassParallelProcessing)
        
        # Make a copy of the input MMS for each flagdata instance
        os.system("cp -r %s %s" % (self.vis,self.vis2))
        os.system("cp -r %s %s" % (self.vis,self.vis3))
        
        ParallelTaskHelper.setMultithreadingMode(True)        
        
        # Set up workers
        cmd1 = "flagdata(vis='%s', mode='summary')" % (self.vis)
        worker1 = ParallelTaskWorker(cmd1)
        
        cmd2 = "flagdata(vis='%s', mode='summary')" % (self.vis2)
        worker2 = ParallelTaskWorker(cmd2)        
        
        cmd3 = "flagdata(vis='%s', mode='summary')" % (self.vis3)
        worker3 = ParallelTaskWorker(cmd3)          
        
        # Spawn worker threads
        worker1.start()
        worker2.start()
        worker3.start()
        
        # Get the resulting summary dict from each worker
        res1 = worker1.getResult()
        res2 = worker2.getResult()
        res3 = worker3.getResult()
        
        ParallelTaskHelper.setMultithreadingMode(False) 
        
        # Compare return summary dicts with the one generated with a sequential run
        self.assertEqual(res1,res, "flagdata dictionary does not match for the first flagdata run")
        self.assertEqual(res2,res, "flagdata dictionary does not match for the second flagdata run")
        self.assertEqual(res3,res, "flagdata dictionary does not match for the third flagdata run")
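The worker pattern in the last test generalizes to any task call that can be expressed as a string: create one ParallelTaskWorker per command, start them all, then collect the results. A compact sketch, assuming the ParallelTaskHelper and ParallelTaskWorker classes used above (the MMS names are placeholders):

ParallelTaskHelper.setMultithreadingMode(True)

# One worker thread per MMS copy; each executes its command string.
workers = [ParallelTaskWorker("flagdata(vis='%s', mode='summary')" % vis)
           for vis in ('copy1.mms', 'copy2.mms', 'copy3.mms')]
for worker in workers:
    worker.start()

# getResult() blocks until the corresponding worker has finished.
results = [worker.getResult() for worker in workers]

ParallelTaskHelper.setMultithreadingMode(False)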