def _configure_lazyflow_settings():
    """Push the configured runtime settings into lazyflow.

    Reads ``status_interval_secs``, ``n_threads`` and ``total_ram_mb`` from the
    surrounding scope (closure or module globals — not visible from here) and
    applies them to lazyflow's cache-status logging, request thread pool, and
    RAM limit, respectively.

    Raises:
        Exception: if ``total_ram_mb`` is positive but below 500 — almost
            certainly a value given in GB by mistake.
    """
    import lazyflow
    import lazyflow.request
    from lazyflow.utility import Memory
    from lazyflow.operators import cacheMemoryManager

    if status_interval_secs:
        # Surface the cache manager's periodic memory reports at DEBUG level.
        logging.getLogger("lazyflow.operators.cacheMemoryManager").setLevel(logging.DEBUG)
        cacheMemoryManager.setRefreshInterval(status_interval_secs)

    if n_threads is not None:
        logger.info(f"Resetting lazyflow thread pool with {n_threads} threads.")
        lazyflow.request.Request.reset_thread_pool(n_threads)

    if total_ram_mb > 0:
        if total_ram_mb < 500:
            raise Exception(
                f"In your current configuration, RAM is limited to {total_ram_mb} MB. "
                "Remember to specify RAM in MB, not GB."
            )
        ram_bytes = total_ram_mb * 1024 ** 2
        logger.info("Configuring lazyflow RAM limit to {}".format(Memory.format(ram_bytes)))
        Memory.setAvailableRam(ram_bytes)
def _configure_lazyflow_settings():
    """Apply lazyflow settings: cache-status logging, thread-pool size, RAM cap.

    The values ``status_interval_secs``, ``n_threads`` and ``total_ram_mb`` are
    read from the surrounding scope (closure or module globals — not visible
    from here).

    Raises:
        Exception: when ``total_ram_mb`` is positive but under 500 MB, which
            usually means the caller specified GB instead of MB.
    """
    import lazyflow
    import lazyflow.request
    from lazyflow.utility import Memory
    from lazyflow.operators.cacheMemoryManager import CacheMemoryManager

    if status_interval_secs:
        # Enable periodic DEBUG-level reports from the cache memory manager.
        cache_logger = logging.getLogger('lazyflow.operators.cacheMemoryManager')
        cache_logger.setLevel(logging.DEBUG)
        CacheMemoryManager().setRefreshInterval(status_interval_secs)

    if n_threads is not None:
        logger.info('Resetting lazyflow thread pool with {} threads.'.format(n_threads))
        lazyflow.request.Request.reset_thread_pool(n_threads)

    if total_ram_mb > 0:
        if total_ram_mb < 500:
            raise Exception(
                'In your current configuration, RAM is limited to {} MB. '
                'Remember to specify RAM in MB, not GB.'.format(total_ram_mb)
            )
        ram = total_ram_mb * 1024 ** 2
        logger.info("Configuring lazyflow RAM limit to {}".format(Memory.format(ram)))
        Memory.setAvailableRam(ram)
def _configure_lazyflow_settings():
    """Apply lazyflow runtime settings: thread-pool size and RAM limit.

    Reads ``n_threads`` and ``total_ram_mb`` from the surrounding scope
    (closure or module globals — not visible from here).

    Raises:
        Exception: if ``total_ram_mb`` is positive but below 500, which almost
            certainly means the value was given in GB instead of MB.
    """
    import lazyflow
    import lazyflow.request
    # FIX: ``Memory`` is used below but was never imported in this function,
    # unlike the sibling variants which import it locally. Importing it here
    # avoids a NameError; this is harmless even if ``Memory`` is also imported
    # at module level (not visible from this chunk).
    from lazyflow.utility import Memory

    if n_threads is not None:
        logger.info("Resetting lazyflow thread pool with {} threads.".format(
            n_threads
        ))
        lazyflow.request.Request.reset_thread_pool(n_threads)
    if total_ram_mb > 0:
        if total_ram_mb < 500:
            raise Exception("In your current configuration, RAM is limited to {} MB."
                            " Remember to specify RAM in MB, not GB."
                            .format(total_ram_mb))
        ram = total_ram_mb * 1024**2
        fmt = Memory.format(ram)
        logger.info("Configuring lazyflow RAM limit to {}".format(fmt))
        Memory.setAvailableRam(ram)
def _configure_lazyflow_settings():
    """Apply lazyflow runtime settings: thread-pool size and RAM limit.

    Reads ``n_threads`` and ``total_ram_mb`` from the surrounding scope
    (closure or module globals — not visible from here).

    Raises:
        Exception: if ``total_ram_mb`` is positive but below 500, which almost
            certainly means the value was given in GB instead of MB.
    """
    import lazyflow
    import lazyflow.request
    # FIX: ``Memory`` is used below but was never imported in this function,
    # unlike the sibling variants which import it locally. Importing it here
    # avoids a NameError; this is harmless even if ``Memory`` is also imported
    # at module level (not visible from this chunk).
    from lazyflow.utility import Memory

    if n_threads is not None:
        logger.info(
            "Resetting lazyflow thread pool with {} threads.".format(
                n_threads))
        lazyflow.request.Request.reset_thread_pool(n_threads)
    if total_ram_mb > 0:
        if total_ram_mb < 500:
            raise Exception(
                "In your current configuration, RAM is limited to {} MB."
                " Remember to specify RAM in MB, not GB.".format(
                    total_ram_mb))
        ram = total_ram_mb * 1024**2
        fmt = Memory.format(ram)
        logger.info("Configuring lazyflow RAM limit to {}".format(fmt))
        Memory.setAvailableRam(ram)
def _cleanup(self):
    """Run one cache-cleanup pass.

    Publishes the current total cache memory usage to subscribers via
    ``self.totalCacheMemory``. If usage exceeds ``self._max_usage`` times the
    allowed cache RAM, frees managed caches / cache blocks (oldest access time
    first, assuming ``PriorityQueue`` pops smallest first — TODO confirm) until
    usage drops below ``self._target_usage`` times the allowed cache RAM.
    Errors are logged and swallowed so the manager keeps running.
    """
    from lazyflow.operators.opCache import ObservableCache

    try:
        # notify subscribed functions about current cache memory
        total = 0

        # Avoid "RuntimeError: Set changed size during iteration"
        with self._first_class_caches_lock:
            first_class_caches = self._first_class_caches.copy()

        for cache in first_class_caches:
            if isinstance(cache, ObservableCache):
                total += cache.usedMemory()
        self.totalCacheMemory(total)
        cache = None  # drop the loop reference so the last cache is not kept alive

        # check current memory state
        cache_memory = Memory.getAvailableRamCaches()
        cache_pct = 0.0
        if cache_memory:  # guard against division by zero
            cache_pct = total * 100.0 / cache_memory
        logger.debug(
            "Process memory usage is {:0.2f} GB out of {:0.2f} (caches are {}, {:.1f}% of allowed)".format(
                Memory.getMemoryUsage() / 2.0 ** 30,
                Memory.getAvailableRam() / 2.0 ** 30,
                Memory.format(total),
                cache_pct,
            )
        )

        if total <= self._max_usage * cache_memory:
            return

        # === we need a cache cleanup ===

        # queue holds time stamps and cleanup functions
        q = PriorityQueue()
        caches = list(self._managed_caches)
        for c in caches:
            q.push((c.lastAccessTime(), c.name, c.freeMemory))
        caches = list(self._managed_blocked_caches)
        for c in caches:
            for k, t in c.getBlockAccessTimes():
                cleanupFun = functools.partial(c.freeBlock, k)
                info = "{}: {}".format(c.name, k)
                q.push((t, info, cleanupFun))
        c = None
        caches = None

        # Free entries until usage falls below the target or nothing is left.
        while total > self._target_usage * cache_memory and len(q) > 0:
            t, info, cleanupFun = q.pop()
            mem = cleanupFun()
            logger.debug("Cleaned up {} ({})".format(info, Memory.format(mem)))
            total -= mem
        gc.collect()
        # don't keep a reference until next loop iteration
        cleanupFun = None
        q = None

        msg = "Done cleaning up, cache memory usage is now at {}".format(Memory.format(total))
        if cache_memory > 0:
            msg += " ({:.1f}% of allowed)".format(total * 100.0 / cache_memory)
        logger.debug(msg)
    except Exception:
        # FIX: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only Exception and log it.
        log_exception(logger)
def _cleanup(self):
    """Run one cache-cleanup pass.

    Publishes the current total cache memory usage to subscribers via
    ``self.totalCacheMemory``. If usage exceeds ``self._max_usage`` times the
    allowed cache RAM, frees managed caches / cache blocks (oldest access time
    first, assuming ``PriorityQueue`` pops smallest first — TODO confirm) until
    usage drops below ``self._target_usage`` times the allowed cache RAM.
    Errors are logged and swallowed so the manager keeps running.
    """
    from lazyflow.operators.opCache import ObservableCache
    try:
        # notify subscribed functions about current cache memory
        total = 0

        # Avoid "RuntimeError: Set changed size during iteration"
        with self._first_class_caches_lock:
            first_class_caches = self._first_class_caches.copy()

        for cache in first_class_caches:
            if isinstance(cache, ObservableCache):
                total += cache.usedMemory()
        self.totalCacheMemory(total)
        cache = None  # drop the loop reference so the last cache is not kept alive

        # check current memory state
        cache_memory = Memory.getAvailableRamCaches()

        if total <= self._max_usage * cache_memory:
            return

        # === we need a cache cleanup ===

        # queue holds time stamps and cleanup functions
        q = PriorityQueue()
        caches = list(self._managed_caches)
        for c in caches:
            q.push((c.lastAccessTime(), c.name, c.freeMemory))
        caches = list(self._managed_blocked_caches)
        for c in caches:
            for k, t in c.getBlockAccessTimes():
                cleanupFun = functools.partial(c.freeBlock, k)
                info = "{}: {}".format(c.name, k)
                q.push((t, info, cleanupFun))
        c = None
        caches = None

        msg = "Caches are using {} memory".format(
            Memory.format(total))
        if cache_memory > 0:
            msg += " ({:.1f}% of allowed)".format(
                total*100.0/cache_memory)
        logger.debug(msg)

        # Free entries until usage falls below the target or nothing is left.
        while (total > self._target_usage * cache_memory
                and len(q) > 0):
            t, info, cleanupFun = q.pop()
            mem = cleanupFun()
            logger.debug("Cleaned up {} ({})".format(
                info, Memory.format(mem)))
            total -= mem
        gc.collect()
        # don't keep a reference until next loop iteration
        cleanupFun = None
        q = None

        msg = ("Done cleaning up, cache memory usage is now at "
               "{}".format(Memory.format(total)))
        if cache_memory > 0:
            msg += " ({:.1f}% of allowed)".format(
                total*100.0/cache_memory)
        logger.debug(msg)
    except Exception:
        # FIX: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only Exception and log it.
        log_exception(logger)
def _cleanup(self):
    """Run one cache-cleanup pass.

    Publishes the current total cache memory usage to subscribers via
    ``self.totalCacheMemory``. If usage exceeds ``self._max_usage`` times the
    allowed cache RAM, frees managed caches / cache blocks in ascending
    last-access-time order (oldest first, per the explicit sort below) until
    usage drops below ``self._target_usage`` times the allowed cache RAM.
    Errors are logged and swallowed so the manager keeps running.
    """
    from lazyflow.operators.opCache import ObservableCache

    try:
        # notify subscribed functions about current cache memory
        total = 0

        # Avoid "RuntimeError: Set changed size during iteration"
        with self._first_class_caches_lock:
            first_class_caches = self._first_class_caches.copy()

        for cache in first_class_caches:
            if isinstance(cache, ObservableCache):
                total += cache.usedMemory()
        self.totalCacheMemory(total)
        cache = None  # drop the loop reference so the last cache is not kept alive

        # check current memory state
        cache_memory = Memory.getAvailableRamCaches()
        cache_pct = 0.0
        if cache_memory:  # guard against division by zero
            cache_pct = total * 100.0 / cache_memory
        logger.debug(
            "Process memory usage is {:0.2f} GB out of {:0.2f} (caches are {}, {:.1f}% of allowed)".format(
                Memory.getMemoryUsage() / 2.0 ** 30,
                Memory.getAvailableRam() / 2.0 ** 30,
                Memory.format(total),
                cache_pct,
            )
        )

        if total <= self._max_usage * cache_memory:
            return

        # Collect (lastAccessTime, description, cleanup-callable) triples for
        # whole caches and for individual blocks of blocked caches.
        cache_entries = []
        cache_entries += [
            (cache.lastAccessTime(), cache.name, cache.freeMemory) for cache in list(self._managed_caches)
        ]
        cache_entries += [
            (lastAccessTime, f"{cache.name}: {blockKey}", functools.partial(cache.freeBlock, blockKey))
            for cache in list(self._managed_blocked_caches)
            for blockKey, lastAccessTime in cache.getBlockAccessTimes()
        ]
        # Oldest access time first, so the least recently used entries go first.
        cache_entries.sort(key=lambda entry: entry[0])

        for lastAccessTime, info, cleanupFun in cache_entries:
            if total <= self._target_usage * cache_memory:
                break
            mem = cleanupFun()
            logger.debug(f"Cleaned up {info} ({Memory.format(mem)})")
            total -= mem

        # Remove references to cache entries before triggering garbage collection.
        cleanupFun = None
        cache_entries = None
        gc.collect()

        msg = "Done cleaning up, cache memory usage is now at {}".format(Memory.format(total))
        if cache_memory > 0:
            msg += " ({:.1f}% of allowed)".format(total * 100.0 / cache_memory)
        logger.debug(msg)
    except Exception:
        # FIX: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only Exception and log it.
        log_exception(logger)