Example #1
 def setUp(self):
     
     self.vis = "Four_ants_3C286.mms"
     setUpFile(self.vis,'vis')
     
     # Tmp files
     self.vis2 = self.vis + '.2'
     self.vis3 = self.vis + '.3'    
     
     # Set up cluster
     self.client = MPICommandClient()
     self.client.set_log_mode('redirect')
     self.client.start_services()  
Example #2
 def setUp(self):
     
     self.vis = 'Four_ants_3C286.mms'
     setUpFile(self.vis,'vis')
     
     self.client = MPICommandClient()
     self.client.set_log_mode('redirect')
     self.client.start_services()       
     
     # Prepare list of servers
     self.server_list = []
     server_list = self.client.get_server_status()
     for server in server_list:
         if not server_list[server]['timeout']:
             self.server_list.append(server_list[server]['rank'])          
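A minimal standalone sketch of the same server-filtering pattern, assuming the mpi4casa import path used by CASA; the loop above implies get_server_status() returns a dict of per-server entries carrying at least 'rank' and 'timeout' keys:

    from mpi4casa.MPICommandClient import MPICommandClient  # assumed import path

    client = MPICommandClient()
    client.set_log_mode('redirect')
    client.start_services()

    # Keep only the ranks of servers that have not timed out,
    # mirroring the filter in the setUp above
    status = client.get_server_status()
    alive_ranks = [info['rank'] for info in status.values() if not info['timeout']]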
Example #3
 def getResult(command_request_id_list,taskname):
     
     # Access MPICommandClient singleton instance
     client = MPICommandClient()
     
     # Get response list
     command_response_list = client.get_command_response(command_request_id_list,True,True)
             
     # Format the response list as a dict keyed by vis
     ret_list = {}
     for command_response in command_response_list:
         vis = command_response['parameters']['vis']
         ret_list[vis] = command_response['ret']
         
     # Consolidate results and return
     ret = ParallelTaskHelper.consolidateResults(ret_list,taskname)
     
     return ret                    
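A hedged usage sketch for getResult, assuming the non-blocking form of push_command_request (blocking=False, target server None for any available server) returns the list of request ids to wait on; the task name and sub-MS paths are placeholders:

    client = MPICommandClient()
    command_request_id_list = []
    for subms in ['mydata.0000.ms', 'mydata.0001.ms']:  # hypothetical sub-MSs
        # Queue one flagdata call per sub-MS without blocking
        req_ids = client.push_command_request("flagdata(vis=vis)", False, None, {'vis': subms})
        command_request_id_list.extend(req_ids)

    # Block on all responses and consolidate them per vis, as defined above
    ret = getResult(command_request_id_list, 'flagdata')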
Example #4
 def test_singleton_behaviour(self):
     
     # Delete current MPICommandClient singleton instance reference
     client_ref = self.client
     del client_ref
     
     # Create a new MPICommandClient singleton instance reference
     new_client_ref = MPICommandClient()
     
     # Execute some command
     command_response_list = new_client_ref.push_command_request("a+b",True,[self.server_list[0]],{'a':1,'b':1})
     
     # Analyze command response list contents
     self.assertEqual(len(command_response_list), 1, "Command response list should contain one element")
     self.assertEqual(command_response_list[0]['successful'], True, "Command execution was not successful")
     self.assertEqual(command_response_list[0]['traceback'], None, "Command execution trace-back should be None")
     self.assertEqual(command_response_list[0]['status'], 'response received', "Command status should be 'response received'")
     self.assertEqual(command_response_list[0]['ret'], 2, "Command return variable should be 2")       
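Read together, the assertions document the shape of a command response entry; an illustrative element of command_response_list (the 'parameters' key is implied by getResult in example #3):

    {'successful': True,             # command ran without raising
     'traceback': None,              # no exception, hence no traceback
     'status': 'response received',
     'ret': 2,                       # value of "a+b" with a=1, b=1
     'parameters': {'a': 1, 'b': 1}}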
Example #5
 def setUp(self):
     
     # Set-up MMS
     self.vis = "ngc5921.applycal.mms"
     self.vis_sorted = "ngc5921.applycal.sorted.mms"
     setUpFile(self.vis,'vis')
     
     # Set-up reference MMS
     self.ref = "ngc5921.applycal.ms"
     self.ref_sorted = "ngc5921.applycal.sorted.ms"
     setUpFile(self.ref,'ref')
     
     # Set-up auxiliary files
     self.aux = ["ngc5921.fluxscale", "ngc5921.gcal", "ngc5921.bcal"]
     setUpFile(self.aux,'aux')
     
     # Repository caltables are pre-v4.1, and we
     # must update them _before_ applycal to avoid contention
     casalog.post("Updating pre-v4.1 caltables: %s" % str(self.aux),"WARN","test1_applycal_fluxscale_gcal_bcal")
     cblocal = cbtool()
     for oldct in self.aux:
         cblocal.updatecaltable(oldct)
     casalog.post("Pre-v4.1 caltables updated","INFO","test_mpi4casa_applycal")        
     
     # Tmp files
     self.vis2 = self.vis + '.2'
     self.vis3 = self.vis + '.3'
     self.vis_sorted2 = self.vis_sorted + '.2'
     self.vis_sorted3 = self.vis_sorted + '.3'
     
     # Tmp aux files
     self.aux2 = []
     self.aux3 = []
     for file in self.aux:
         self.aux2.append(file + '.2')
         self.aux3.append(file + '.3')
     
     # Set up cluster
     self.client = MPICommandClient()
     self.client.set_log_mode('redirect')
     self.client.start_services()          
Example #6
 def __init__(self, task_name, args={}):
     self._arg = dict(args)
     self._arguser = {}
     self._taskName = task_name
     self._executionList = []
     self._jobQueue = None
     # Cache the initial inputs
     self.__originalParams = args
     # jagonzal: Add reference to cluster object
     self._cluster = None
     self._mpi_cluster = False
     self._command_request_id_list = None
     if not MPIEnvironment.is_mpi_enabled:
         self.__bypass_parallel_processing = 1
     if (self.__bypass_parallel_processing == 0):
         self._mpi_cluster = True
         self._command_request_id_list = []
         self._cluster = MPICommandClient()
     # jagonzal: To inhibit return values consolidation
     self._consolidateOutput = True
     # jagonzal (CAS-4287): Add a cluster-less mode to by-pass parallel processing for MMSs as requested
     self._sequential_return_list = {}
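A minimal instantiation sketch for this constructor; the task name and arguments are illustrative only, and whether an MPI cluster is attached depends on MPIEnvironment.is_mpi_enabled and the bypass flag checked above:

    # Hypothetical helper construction; 'flagdata' and the vis path are placeholders
    helper = ParallelTaskHelper('flagdata', {'vis': 'mydata.mms'})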
Example #7
 def setUp(self):
     
     self.client = MPICommandClient()
     self.client.set_log_mode('redirect')
     self.server_list = MPIEnvironment.mpi_server_rank_list()
     self.client.start_services()
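Combining this setUp with the blocking request from example #4 gives a short end-to-end sketch; the toy command and parameters are the same ones used there, and the import paths are assumptions:

    from mpi4casa.MPICommandClient import MPICommandClient
    from mpi4casa.MPIEnvironment import MPIEnvironment

    client = MPICommandClient()
    client.set_log_mode('redirect')
    server_list = MPIEnvironment.mpi_server_rank_list()
    client.start_services()

    # Blocking evaluation of "a+b" on the first server, as in example #4
    responses = client.push_command_request("a+b", True, [server_list[0]], {'a': 1, 'b': 1})
    assert responses[0]['ret'] == 2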