def consolidate(self): """ Consolidates the results of the single processes into a consistent result of the whole operation """ assert (self.state == "retrieved") if ((self.ProcessingSuccessful == True) and (len(self.CrashedProcesses) == 0)): self.current_operation.consolidate() if ((self.ProcessingSuccessful == True) and (len(self.CrashedProcesses) != 0)): import pySPACE.resources.dataset_defs.performance_result.PerformanceResultSummary as PerformanceResultSummary # merge the remaining files print "***************************************************************************************************" print "Starting merging . . ." PerformanceResultSummary.merge_performance_results( self.current_operation.result_directory) print "Merging complete . . ." print "***************************************************************************************************" self._log("Operation - consolidated") self.state = "consolidated"
def main():
    """ Merge the performance result csv files found in the directory
    given as the first command-line argument.

    Extends ``sys.path`` with the checkout root (the directory containing
    the ``pySPACE`` package, derived from this file's location) so that
    the pySPACE package can be imported when the script is run directly.
    """
    import os
    import sys
    # Locate the checkout root: everything up to (but excluding) the
    # 'pySPACE' path component of this file's absolute path.
    file_path = os.path.dirname(os.path.abspath(__file__))
    pyspace_path = file_path[:file_path.rfind('pySPACE') - 1]
    if pyspace_path not in sys.path:
        sys.path.append(pyspace_path)
    from pySPACE.resources.dataset_defs.performance_result \
        import PerformanceResultSummary
    input_dir = sys.argv[1]
    PerformanceResultSummary.merge_performance_results(input_dir)
def consolidate(self): """ Consolidates the results of the single processes into a consistent result of the whole operation """ assert(self.state == "retrieved") if ((self.ProcessingSuccessful ==True) and (len(self.CrashedProcesses) == 0)): self.current_operation.consolidate() if ((self.ProcessingSuccessful ==True) and (len(self.CrashedProcesses) != 0)): import pySPACE.resources.dataset_defs.performance_result.PerformanceResultSummary as PerformanceResultSummary # merge the remaining files print "***************************************************************************************************" print "Starting merging . . ." PerformanceResultSummary.merge_performance_results(self.current_operation.result_directory) print "Merging complete . . ." print "***************************************************************************************************" self._log("Operation - consolidated") self.state = "consolidated"
def retrieve(self):
    """ Returns the result of the operation.

    Starts the first batch of processes in a single ``mpirun`` call, then
    polls the file system for per-process ``*_Finished`` / ``*_Crashed``
    marker files, launching a further batch of
    ``NumberOfProcessesToRunLater`` processes each time a batch has
    completed.  Blocks until every process has produced a marker file, or
    until the user interrupts (Ctrl-C / SystemExit), in which case the
    partial results are merged before giving up.  Finally moves the
    backend state from "executing" to "retrieved" and returns ``None``.
    """
    self.state = "executing"
    self._log("Operation - executing")
    if (self.NumberOfProcessesToRunAtBeginning > len(self.process_args_list)):
        # Fewer processes than the initial batch size: start all of them at once.
        args = ([self.COMMAND_MPI] + ['--loadbalance'] + ['--nolocal'] +
                ['--hostfile'] +
                [pySPACE.configuration.root_dir + "/" + 'hostsfile'] +
                ['-n', str(len(self.process_args_list))] +
                [self.COMMAND_PYTHON] + [self.runner_script] +
                self.process_args_list)
        # Start the processes.
        p = subprocess.Popen(args)
        #self.pids.append(p)
        self.IndexCopyStart += self.NumberOfProcessesToRunAtBeginning
        #print args
    else:
        #copy the arguments of the processes to run
        # NOTE(review): the slice end is NumberOfProcessesToRunAtBeginning,
        # not IndexCopyStart + NumberOfProcessesToRunAtBeginning; this is
        # only correct while IndexCopyStart is 0 — confirm.
        sub_process_args_list = (self.process_args_list[
            self.IndexCopyStart:self.NumberOfProcessesToRunAtBeginning])
        args = ([self.COMMAND_MPI] + ['--loadbalance'] + ['--nolocal'] +
                ['--hostfile'] +
                [pySPACE.configuration.root_dir + "/" + 'hostsfile'] +
                ['-n', str(len(sub_process_args_list))] +
                [self.COMMAND_PYTHON] + [self.runner_script] +
                sub_process_args_list)
        # Start the processes.
        p = subprocess.Popen(args)
        #self.pids.append(p)
        # TODO: call p.poll() for p in self.pids after all processes have exited
        self.IndexCopyStart += self.NumberOfProcessesToRunAtBeginning
        #print args
    # Create a list of boolean for processes which are finished.
    # First we assume that all processes are not started, so we set
    # every element of the list to false
    FinishedProcesses = [False for i in range(len(self.process_args_list))]
    # Wait until all processes finish and start new processes
    # when old ones finish
    print "Waiting for the processes to finish...."
    # Counter for the processes which are finished. It will be reset
    # after 'NumberOfProcessesToRunLater' processes are finished
    CounterProcessesFinished = 0
    processes_Finished = False
    while not processes_Finished:
        try:
            processes_Finished = True
            for LoopCounter, process_args in enumerate(self.process_args_list):
                # A process is done when exactly one of the two marker files
                # exists; not_xor is true when both or neither exist.
                if (self.not_xor(os.path.isfile(process_args + "_Finished"),
                                 os.path.isfile(process_args + "_Crashed"))):
                    processes_Finished = False
                else:
                    if (FinishedProcesses[LoopCounter] == False):
                        # Record that the process is finished
                        FinishedProcesses[LoopCounter] = True
                        # If the process is crashed take note of that
                        if (os.path.isfile(process_args + "_Crashed")):
                            self.CrashedProcesses.append(process_args)
                        # Increment the counter for the number of processes finished
                        # by one
                        CounterProcessesFinished += 1
                        self.TotalProcessesFinished += 1
                        # update the progress bar
                        self.progress_bar.update(self.TotalProcessesFinished)
                        if (CounterProcessesFinished ==
                                self.NumberOfProcessesToRunLater):
                            # Define a variable for a subset of processes to run
                            sub_process_args_list = []
                            if (self.IndexCopyStart ==
                                    len(self.process_args_list)):
                                break
                            elif ((self.IndexCopyStart +
                                   self.NumberOfProcessesToRunLater) <
                                  len(self.process_args_list)):
                                sub_process_args_list = (
                                    self.process_args_list[
                                        self.IndexCopyStart:
                                        self.IndexCopyStart +
                                        self.NumberOfProcessesToRunLater])
                            else:
                                # Last (possibly short) batch.
                                sub_process_args_list = self.process_args_list[
                                    self.IndexCopyStart:
                                    len(self.process_args_list)]
                            args = ([self.COMMAND_MPI] + ['--loadbalance'] +
                                    ['--nolocal'] + ['--hostfile'] +
                                    [pySPACE.configuration.root_dir + "/" +
                                     'hostsfile'] +
                                    ['-n', str(len(sub_process_args_list))] +
                                    [self.COMMAND_PYTHON] +
                                    [self.runner_script] +
                                    sub_process_args_list)
                            # Start the processes
                            if (len(sub_process_args_list) > 0):
                                p = subprocess.Popen(args)
                            #print args
                            # Adjust the start index
                            self.IndexCopyStart += \
                                self.NumberOfProcessesToRunLater
                            # Reset the counter for processes finished
                            CounterProcessesFinished = 0
            # sleep for one second
            time.sleep(1)
        except (KeyboardInterrupt, SystemExit):
            # if processes hang forever
            self.ProcessingSuccessful = False
            print "*********************************************************************************************************"
            print "pySPACE forced to stop ..."
            print "Please wait until mpi_backend is finished with consolidating the results generated and with clean up ..."
            print "**********************************************************************************************************"
            # NOTE(review): PerformanceResultSummary appears to be a class
            # inside the performance_result module (see the from-import form
            # used elsewhere in this file), so this "import pkg.mod.Class as
            # Name" statement likely raises ImportError — confirm.
            import pySPACE.resources.dataset_defs.performance_result.PerformanceResultSummary as PerformanceResultSummary
            # merge the remaining files
            print "***************************************************************************************************"
            print "Starting merging . . ."
            PerformanceResultSummary.merge_performance_results(self.current_operation.result_directory)
            print "Merging complete . . ."
            print "***************************************************************************************************"
            break  #The while loop will break
    self._log("Operation - processing finished")
    # Change the state to retrieved
    self.state = "retrieved"
    return None
def retrieve(self, timeout=1e6): """ Returns the result of the operation. """ self.state = "executing" self._log("Operation - executing") if (self.NumberOfProcessesToRunAtBeginning > len( self.process_args_list)): args = ([self.COMMAND_MPI] + ['--loadbalance'] + ['--nolocal'] + ['--hostfile'] + [pySPACE.configuration.root_dir + "/" + 'hostsfile'] + ['-n', str(len(self.process_args_list))] + [self.COMMAND_PYTHON] + [self.runner_script] + self.process_args_list) # Start the processes. self._log("mpi-parameters: %s" % args, level=logging.DEBUG) self._log("mpi-parameters-joined: %s" % os.path.join(args), level=logging.DEBUG) p = subprocess.Popen(args) #self.pids.append(p) self.IndexCopyStart += self.NumberOfProcessesToRunAtBeginning #print args else: #copy the arguments of the processes to run sub_process_args_list = ( self.process_args_list[self.IndexCopyStart:self. NumberOfProcessesToRunAtBeginning]) args = ([self.COMMAND_MPI] + ['--loadbalance'] + ['--nolocal'] + ['--hostfile'] + [pySPACE.configuration.root_dir + "/" + 'hostsfile'] + ['-n', str(len(sub_process_args_list))] + [self.COMMAND_PYTHON] + [self.runner_script] + sub_process_args_list) # Start the processes. p = subprocess.Popen(args) #self.pids.append(p) # TODO: call p.poll() for p in self.pids after all processes have exited self.IndexCopyStart += self.NumberOfProcessesToRunAtBeginning #print args # Create a list of boolean for processes which are finished. # First we assume that all processes are not started, so we set # every element of the list to false FinishedProcesses = [False for i in range(len(self.process_args_list))] # Wait until all processes finish and start new processes # when old ones finish print "Waiting for the processes to finish...." # Counter for the processes which are finished. 
It will be reset # after 'NumberOfProcessesToRunLater' processes are finished CounterProcessesFinished = 0 processes_Finished = False while not processes_Finished: try: processes_Finished = True for LoopCounter, process_args in enumerate( self.process_args_list): if (self.not_xor( os.path.isfile(process_args + "_Finished"), os.path.isfile(process_args + "_Crashed"))): processes_Finished = False else: if (FinishedProcesses[LoopCounter] == False): # Record that the process is finished FinishedProcesses[LoopCounter] = True # If the process is crashed take note of that if (os.path.isfile(process_args + "_Crashed")): self.CrashedProcesses.append(process_args) # Increment the counter for the number of processes finished # by one CounterProcessesFinished += 1 self.TotalProcessesFinished += 1 # update the progress bar self.progress_bar.update( self.TotalProcessesFinished) if (CounterProcessesFinished == self.NumberOfProcessesToRunLater): # Define a variable for a subset of processes to run sub_process_args_list = [] if (self.IndexCopyStart == len( self.process_args_list)): break elif ((self.IndexCopyStart + self.NumberOfProcessesToRunLater) < len( self.process_args_list)): sub_process_args_list = ( self.process_args_list[ self.IndexCopyStart:self. 
IndexCopyStart + self.NumberOfProcessesToRunLater]) else: sub_process_args_list = self.process_args_list[ self.IndexCopyStart:len( self.process_args_list)] args = ( [self.COMMAND_MPI] + ['--loadbalance'] + ['--nolocal'] + ['--hostfile'] + [ pySPACE.configuration.root_dir + "/" + 'hostsfile' ] + ['-n', str(len(sub_process_args_list))] + [self.COMMAND_PYTHON] + [self.runner_script] + sub_process_args_list) # Start the processes if (len(sub_process_args_list) > 0): p = subprocess.Popen(args) #print args # Adjust the start index self.IndexCopyStart += self.NumberOfProcessesToRunLater # Reset the counter for processes finished CounterProcessesFinished = 0 # sleep for one second time.sleep(1) except (KeyboardInterrupt, SystemExit): # if processes hang forever self.ProcessingSuccessful = False print "*********************************************************************************************************" print "pySPACE forced to stop ..." print "Please wait until mpi_backend is finished with consolidating the results generated and with clean up ..." print "**********************************************************************************************************" import pySPACE.resources.dataset_defs.performance_result.PerformanceResultSummary as PerformanceResultSummary # merge the remaining files print "***************************************************************************************************" print "Starting merging . . ." PerformanceResultSummary.merge_performance_results( self.current_operation.result_directory) print "Merging complete . . ." print "***************************************************************************************************" break #The while loop will break self._log("Operation - processing finished") # Change the state to retrieved self.state = "retrieved" return None
from PyQt4 import QtGui # Copy the csv-files to a temporary directory such that there is no risk of # interfering with the running operation input_dir = sys.argv[1] temp_dir = tempfile.mkdtemp() for filename in fnmatch.filter(os.listdir(os.path.abspath(input_dir)), "*.csv"): shutil.copy(input_dir + os.sep + filename, temp_dir) file_path = os.path.dirname(os.path.abspath(__file__)) pyspace_path = file_path[:file_path.rfind('pySPACE') - 1] if not pyspace_path in sys.path: sys.path.append(pyspace_path) # Import csv-analysis and merge csv files from pySPACE.resources.dataset_defs.performance_result import PerformanceResultSummary PerformanceResultSummary.merge_performance_results(temp_dir) # Invoke results analysis gui from pySPACE.run.gui.performance_results_analysis import PerformanceResultsAnalysisMainWindow app = QtGui.QApplication(sys.argv) performance_results_analysis = \ PerformanceResultsAnalysisMainWindow(temp_dir + os.sep + "results.csv") performance_results_analysis.show() # Clean up shutil.rmtree(temp_dir) sys.exit(app.exec_())
# Copy the csv-files to a temporary directory such that there is no risk of # interfering with the running operation input_dir = sys.argv[1] temp_dir = tempfile.mkdtemp() for filename in fnmatch.filter(os.listdir(os.path.abspath(input_dir)), "*.csv"): shutil.copy(input_dir + os.sep + filename, temp_dir) file_path = os.path.dirname(os.path.abspath(__file__)) pyspace_path = file_path[:file_path.rfind('pySPACE')-1] if not pyspace_path in sys.path: sys.path.append(pyspace_path) # Import csv-analysis and merge csv files from pySPACE.resources.dataset_defs.performance_result import PerformanceResultSummary PerformanceResultSummary.merge_performance_results(temp_dir) # Invoke results analysis gui from pySPACE.run.gui.performance_results_analysis import PerformanceResultsAnalysisMainWindow app = QtGui.QApplication(sys.argv) performance_results_analysis = \ PerformanceResultsAnalysisMainWindow(temp_dir + os.sep + "results.csv") performance_results_analysis.show() # Clean up shutil.rmtree(temp_dir) sys.exit(app.exec_())