Example #1
    def __init__(self, num_workers, worker_name):
        threading.Thread.__init__(self, name=worker_name)

        self.pool = Pool(num_workers=num_workers, name=worker_name)

        self.in_progress_jobs = Manager().list()
        self.lock = Manager().Lock()
Example #2
    def __init__(self, parent=None):
        super(ICEExporter, self).__init__(parent)
        self.files = []
        self.state = self.STOP
        self.destination_folder = '.'
        self.t1 = 0
        self.t2 = 0
        self.fmt = CONSTS.TEXT_FMT
        self.pool = Pool(self)

        # dialogs
        self._folder_dialog = ICEExportFolderDialog(self, parent)
        self._file_dialog = ICEExportFileDialog(self, parent)
Example #4
class JobManager(threading.Thread):
    def __init__(self, num_workers, worker_name):
        threading.Thread.__init__(self, name=worker_name)

        self.pool = Pool(num_workers=num_workers, name=worker_name)

        self.in_progress_jobs = Manager().list()
        self.lock = Manager().Lock()

    def run(self):
        while True:
            try:
                time.sleep(10)
                self.dispatch()
            except Exception:
                # Print to debug console instead of to DB.
                import traceback
                print(traceback.format_exc())

    def dispatch(self):
        raise NotImplementedError("Children must override dispatch()")

    def submit_job(self, work_unit):
        with self.lock:
            if work_unit.get_unique_key() in self.in_progress_jobs:
                return False

            self.in_progress_jobs.append(work_unit.get_unique_key())

        # Hand the work unit references to the shared (Manager-backed) list and lock
        work_unit.in_progress_jobs = self.in_progress_jobs
        work_unit.lock = self.lock

        self.pool.submit(work_unit)

        return True
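
A minimal usage sketch for the JobManager class above (not part of the original examples): a subclass overrides dispatch() and submits hypothetical work units, and each work unit uses the shared Manager() list and lock it receives in submit_job() to deregister itself when it finishes. PrintJob, pending_print_jobs(), and the run() entry point are illustrative assumptions; only JobManager and its submit_job()/dispatch() contract come from the code above, and the method the Pool actually calls on a submitted work unit is not shown in these examples.

# Illustrative sketch only: PrintJob, pending_print_jobs() and run() are assumptions.
class PrintJob(object):
    def __init__(self, document_id):
        self.document_id = document_id
        # JobManager.submit_job() assigns the shared list and lock before pooling.
        self.in_progress_jobs = None
        self.lock = None

    def get_unique_key(self):
        return 'print-%s' % self.document_id

    def run(self):
        # Hypothetical worker entry point; the real one depends on the Pool class.
        try:
            pass  # do the actual work here
        finally:
            # Deregister so a job with the same key can be submitted again later.
            with self.lock:
                self.in_progress_jobs.remove(self.get_unique_key())


def pending_print_jobs():
    # Placeholder for however the application queues its work.
    return [PrintJob(document_id=1)]


class PrintJobManager(JobManager):
    def dispatch(self):
        # Called every 10 seconds by JobManager.run(); duplicates are skipped
        # because submit_job() checks the shared in_progress_jobs list.
        for job in pending_print_jobs():
            self.submit_job(job)


manager = PrintJobManager(num_workers=4, worker_name='print-worker')
manager.daemon = True
manager.start()
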
Example #5
class ICEExporter(QtCore.QObject):
    """ Class to manage processes for exporting ICE cache data to ascii. """
    # string: export file name
    cacheExporting = QtCore.pyqtSignal(str)
    # int: number of files to export
    beginCacheExporting = QtCore.pyqtSignal(int)
    endCacheExporting = QtCore.pyqtSignal()

    # states
    STOP = 0
    RUN = 1
    ERROR = 2

    def __init__(self, parent=None):
        super(ICEExporter, self).__init__(parent)
        self.files = []
        self.state = self.STOP
        self.destination_folder = '.'
        self.t1 = 0
        self.t2 = 0
        self.fmt = CONSTS.TEXT_FMT
        self.pool = Pool(self)

        # dialogs
        self._folder_dialog = ICEExportFolderDialog(self, parent)
        self._file_dialog = ICEExportFileDialog(self, parent)

    @property
    def folder_dialog(self):
        return self._folder_dialog

    @property
    def file_dialog(self):
        return self._file_dialog

    def cancel(self):
        self.pool.cancel()

    def export_folder(self, folder, destination, fmt=CONSTS.TEXT_FMT):
        """ Export the cache files contained in a folder """
        (self.files, startindex, end) = get_files_from_cache_folder(folder)
        self.destination_folder = destination
        self.state = self.STOP
        self.files_processed = 0
        self.t1 = 0
        self.t2 = 0
        self.fmt = fmt
        self._start()

    def export_files(self, files, destination, fmt=CONSTS.TEXT_FMT):
        """ Export a list of cache files """
        (self.files, startindex, end) = get_files(files)
        self.destination_folder = destination
        self.state = self.STOP
        self.t1 = 0
        self.t2 = 0
        self.fmt = fmt
        self._start()

    def _start(self):
        """ Start file export process. """
        self.file_block = 1
        self.indexset = range(0, len(self.files), self.file_block)

        # Submit export tasks to process pool
        cpu_count = 1
        if self.parent():
            cpu_count = self.parent().prefs.process_count
        else:
            import multiprocessing as mp
            cpu_count = mp.cpu_count()

        self.pool.init(cpu_count, self._on_process_callback)
        self.state = self.STOP
        file_count = len(self.files)
        self.files_processed = 0
        for i in self.indexset:
            file_list = []
            for j in range(self.file_block):
                if i + j < file_count:
                    file_list.append(self.files[i + j])
            self.pool.submit(
                ExportTask([file_list, self.destination_folder, self.fmt]))

    def _on_process_callback(self, sender, notif, arg):
        """ Called when an event occurs from a process """

        if notif == Pool.STARTED:
            #print 'Pool.STARTED'
            if self.state == self.STOP:
                self.t1 = time.time()
                self.beginCacheExporting.emit(len(self.files))
                self.state = self.RUN

        elif notif == Pool.ERROR:
            print('process error: %d - %s' % (sender.pid(), arg))
            self.endCacheExporting.emit()
            self.state = self.ERROR

        elif notif == Pool.STATE_CHANGE:
            pass

        elif notif == Pool.OUTPUT_MSG:
            if self.state == self.ERROR:
                return
            s_out = bytes.decode(bytes(sender.readAllStandardOutput()))
            #print 'Pool.OUTPUT_MSG: %s' % s_out
            self.cacheExporting.emit(s_out)

        elif notif == Pool.OUTPUT_ERROR_MSG:
            s_out = bytes.decode(bytes(sender.readAllStandardError()))
            print('process error output: %s - %s\nRetry your operation.' % (repr(sender), s_out))

        elif notif == Pool.FINISHED:
            #print 'Pool.FINISHED'

            if self.state == self.ERROR:
                return
            self.files_processed += self.file_block
            if self.files_processed >= len(self.files):
                self.t2 = time.time()
                self.endCacheExporting.emit()
                self.state = self.STOP
                print('Export time %0.3f s' % (self.t2 - self.t1))
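
A short, hedged usage sketch for the ICEExporter class above: it only connects the three signals declared on the class to plain functions and starts a folder export. The folder and destination paths are placeholders, and it assumes the same Pool, CONSTS and Qt imports the class itself relies on, plus a running Qt event loop so the process callbacks can fire.

# Illustrative only: both paths below are placeholders.
def on_begin(file_count):
    print('exporting %d cache files' % file_count)

def on_progress(message):
    print(message)

def on_end():
    print('export finished')

exporter = ICEExporter()   # no parent: _start() falls back to multiprocessing.cpu_count()
exporter.beginCacheExporting.connect(on_begin)
exporter.cacheExporting.connect(on_progress)
exporter.endCacheExporting.connect(on_end)
exporter.export_folder('/path/to/icecache_folder', '/path/to/ascii_out', fmt=CONSTS.TEXT_FMT)
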
class ICECacheLoader(QtCore.QObject):
    """ Class for loading cache files. Files are loaded through processes managed by the Pool class. The data loaded
    by processes is sent to ICECacheLoader via PyQt's QLocalSocket and QLocalServer which are basically a named pipe. """
    # signal sent for each new loaded cache
    # int: cache index
    # tuple: (reader, points, colors, sizes)
    #cacheLoaded = QtCore.pyqtSignal( int, tuple ) 
    cacheLoaded = QtCore.pyqtSignal( int, str ) 
    beginCacheLoading = QtCore.pyqtSignal()
    endCacheLoading = QtCore.pyqtSignal()

    # Loading state
    STOP = 0
    RUN = 1
    ERROR = 2
    
    def __init__(self, parent = None):    
        super(ICECacheLoader,self).__init__(parent)
        self._pool = None
        self._state = self.STOP
        self._files = []
        self._cache = {}
        
    def init_process_server( self ):
        self._state = self.STOP
        self._files = []
        self._cache = {}
        self._pool = Pool(self)
        self._pool.init(self.parent().prefs.process_count, self._on_process_callback)

    def cancel(self):
        self._state = self.STOP
        self.endCacheLoading.emit()        
        if self._pool:
            self._pool.cancel()
        print "Operation was cancelled."

    def points( self, cache_index ):
        if cache_index in self._cache and self._cache[ cache_index ] is not None and POINT_DATA in self._cache[ cache_index ]:
            return self._cache[ cache_index ][ POINT_DATA ][:]
        return []

    def colors( self, cache_index ):
        if cache_index in self._cache and self._cache[ cache_index ] is not None and COLOR_DATA in self._cache[ cache_index ]:
            return self._cache[ cache_index ][ COLOR_DATA ][:]
        return []

    def sizes( self, cache_index ):
        if cache_index in self._cache and self._cache[ cache_index ] is not None and SIZE_DATA in self._cache[ cache_index ]:
            return self._cache[ cache_index ][ SIZE_DATA ][:]
        return []

    def __getitem__( self, arg ):
        """ return item cache by index """
        return self._cache[ arg ]
    
    def load_cache_files( self, files, start, end ):    
        """ Start the loading process. """         
        # initialize the process server first
        self.init_process_server()

        self._files = files
        self.startindex = start
        self.endindex = end        

        # file_block can be used to assign multiple files per process.
        # note: assigning more than one file per process usually gives poorer load balancing and hurts performance.
        self.file_block = 1
        self.indexset = range(0,len(self._files),self.file_block)     

        # Submit all load tasks to the process pool
        # note: 1 file / task gives very good performance in general.
        self._state = self.STOP
        file_count = len(self._files)
        self._files_processed = 0
        index = self.startindex
        for i in self.indexset:
            file_list = []
            file_index = []
            for j in range(self.file_block):
                if i+j < file_count:
                    file_list.append( self._files[i+j] )
                    file_index.append( index )
                    index += 1                    
            self._pool.submit( LoaderTask( [ file_list, file_index ] ) )
        
    def _on_process_callback( self, sender, notif, arg ):
        """ Called when an event occurs from a process """        
        if notif == Pool.STARTED:
            if self._state == self.STOP:
                self.t1 = time.time()
                self._state = self.RUN
                self.beginCacheLoading.emit()
            return 
                
        elif notif == Pool.ERROR:
            self._state = self.ERROR
            return 
            
        elif notif == Pool.STATE_CHANGE:
            return 
            
        elif notif == Pool.OUTPUT_MSG:
            if self._state == self.ERROR:
                return 
            # Process has finished loading the file
            s_data = bytes.decode( bytes( sender.readAllStandardOutput() ) )
            try:
                # s_data is the worker's stdout: a printable Python sequence of (cache index, hdf5 file path)
                data = eval(s_data)
                # save cache
                self._cache[ data[0] ] = h5.File( data[1], 'r' )

                # notify clients
                self.cacheLoaded.emit( data[0], data[1] )

            except Exception:
                # problem, just cancel everything
                self.cancel()
                raise Pool.Error
                         
        elif notif == Pool.OUTPUT_ERROR_MSG:
            self._state = self.ERROR
            s_out = bytes.decode( bytes( sender.readAllStandardError() ) )
            print('process error output: %s - %s\nRetry your operation.' % (repr(sender), s_out))
            
        elif notif == Pool.FINISHED:
            if self._state == self.STOP:
                return 
            
            if self._state == self.ERROR:
                # tell clients the loading process has been cancelled or was terminated prematurely
                self.cancel()
                return 
                        
            #print 'process finished: %s\n' % (repr(sender))
            self._files_processed += self.file_block
            if self._files_processed >= len(self._files):
                self.t2 = time.time()
                self._state = self.STOP
                self.endCacheLoading.emit()
                print('Processes %d Loading time %0.3f s' % (self._pool.process_count, self.t2 - self.t1))
            return 
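
A minimal sketch of driving the ICECacheLoader class above, assuming it is parented to a QObject that exposes prefs.process_count (required by init_process_server()) and that the h5, Pool and LoaderTask names used by the class are importable; the parent object, file names and slot below are illustrative.

# Illustrative only: main_window stands in for any QObject parent with a prefs.process_count attribute.
loader = ICECacheLoader(parent=main_window)

def on_cache_loaded(cache_index, hdf5_path):
    print('cache %d staged in %s' % (cache_index, hdf5_path))
    # Accessors return a copy of the cached data, or [] when the attribute is missing.
    print('%d points' % len(loader.points(cache_index)))

loader.cacheLoaded.connect(on_cache_loaded)

files = ['frame_1.icecache', 'frame_2.icecache']   # placeholder file names
loader.load_cache_files(files, start=1, end=len(files))
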