Ejemplo n.º 1
0
 def testSettings(self):
     """Round-trip the Memory RAM settings: defaults are positive and setters stick."""
     # Defaults reported by the process must be positive before we override them.
     assert Memory.getAvailableRam() > 0
     assert Memory.getAvailableRamCaches() > 0

     requested_ram = 47 * 1111
     Memory.setAvailableRam(requested_ram)
     assert Memory.getAvailableRam() == requested_ram

     requested_cache_ram = requested_ram // 3
     Memory.setAvailableRamCaches(requested_cache_ram)
     assert Memory.getAvailableRamCaches() == requested_cache_ram
Ejemplo n.º 2
0
 def testSettings(self):
     """Verify that the RAM/cache-RAM setters are reflected by the getters."""
     # The initial (default) values must both be strictly positive.
     for current in (Memory.getAvailableRam(), Memory.getAvailableRamCaches()):
         assert current > 0

     ram = 47 * 1111
     cache_ram = ram // 3

     # Apply the new limits, then read them back.
     Memory.setAvailableRam(ram)
     Memory.setAvailableRamCaches(cache_ram)
     assert Memory.getAvailableRam() == ram
     assert Memory.getAvailableRamCaches() == cache_ram
Ejemplo n.º 3
0
    def _cleanup(self):
        """
        Run a single cache-cleanup pass.

        Publishes the current total cache memory usage to subscribers via
        ``self.totalCacheMemory``.  If the total exceeds
        ``self._max_usage`` times the cache RAM budget, frees the least
        recently used caches/blocks until usage drops below
        ``self._target_usage`` times the budget.

        All errors are logged and suppressed: this runs periodically and
        must never kill the memory manager.
        """
        from lazyflow.operators.opCache import ObservableCache

        try:
            # notify subscribed functions about current cache memory
            total = 0

            # Avoid "RuntimeError: Set changed size during iteration"
            with self._first_class_caches_lock:
                first_class_caches = self._first_class_caches.copy()

            for cache in first_class_caches:
                if isinstance(cache, ObservableCache):
                    total += cache.usedMemory()
            self.totalCacheMemory(total)
            cache = None  # drop the loop reference so gc can reclaim the cache

            # check current memory state
            cache_memory = Memory.getAvailableRamCaches()
            cache_pct = 0.0
            if cache_memory:  # guard against division by zero
                cache_pct = total * 100.0 / cache_memory

            logger.debug(
                "Process memory usage is {:0.2f} GB out of {:0.2f} (caches are {}, {:.1f}% of allowed)".format(
                    Memory.getMemoryUsage() / 2.0 ** 30,
                    Memory.getAvailableRam() / 2.0 ** 30,
                    Memory.format(total),
                    cache_pct,
                )
            )

            if total <= self._max_usage * cache_memory:
                return

            # === we need a cache cleanup ===

            # queue holds time stamps and cleanup functions
            q = PriorityQueue()
            caches = list(self._managed_caches)
            for c in caches:
                q.push((c.lastAccessTime(), c.name, c.freeMemory))
            caches = list(self._managed_blocked_caches)
            for c in caches:
                for k, t in c.getBlockAccessTimes():
                    cleanupFun = functools.partial(c.freeBlock, k)
                    info = "{}: {}".format(c.name, k)
                    q.push((t, info, cleanupFun))
            c = None
            caches = None

            # Free oldest entries first until we are back under the target.
            while total > self._target_usage * cache_memory and len(q) > 0:
                t, info, cleanupFun = q.pop()
                mem = cleanupFun()
                logger.debug("Cleaned up {} ({})".format(info, Memory.format(mem)))
                total -= mem
            gc.collect()
            # don't keep a reference until next loop iteration
            cleanupFun = None
            q = None

            msg = "Done cleaning up, cache memory usage is now at {}".format(Memory.format(total))
            if cache_memory > 0:
                msg += " ({:.1f}% of allowed)".format(total * 100.0 / cache_memory)
            logger.debug(msg)
        except Exception:
            # Best-effort cleanup: log and keep the manager alive.
            # NOTE(review): was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            log_exception(logger)
Ejemplo n.º 4
0
    def _cleanup(self):
        """
        Run a single cache-cleanup pass.

        Publishes the current total cache memory usage to subscribers via
        ``self.totalCacheMemory``.  If the total exceeds
        ``self._max_usage`` times the cache RAM budget, frees the least
        recently used caches/blocks until usage drops below
        ``self._target_usage`` times the budget.

        All errors are logged and suppressed: this runs periodically and
        must never kill the memory manager.
        """
        from lazyflow.operators.opCache import ObservableCache
        try:
            # notify subscribed functions about current cache memory
            total = 0

            # Avoid "RuntimeError: Set changed size during iteration"
            with self._first_class_caches_lock:
                first_class_caches = self._first_class_caches.copy()

            for cache in first_class_caches:
                if isinstance(cache, ObservableCache):
                    total += cache.usedMemory()
            self.totalCacheMemory(total)
            cache = None  # drop the loop reference so gc can reclaim the cache

            # check current memory state
            cache_memory = Memory.getAvailableRamCaches()

            if total <= self._max_usage * cache_memory:
                return

            # === we need a cache cleanup ===

            # queue holds time stamps and cleanup functions
            q = PriorityQueue()
            caches = list(self._managed_caches)
            for c in caches:
                q.push((c.lastAccessTime(), c.name, c.freeMemory))
            caches = list(self._managed_blocked_caches)
            for c in caches:
                for k, t in c.getBlockAccessTimes():
                    cleanupFun = functools.partial(c.freeBlock, k)
                    info = "{}: {}".format(c.name, k)
                    q.push((t, info, cleanupFun))
            c = None
            caches = None

            msg = "Caches are using {} memory".format(
                Memory.format(total))
            if cache_memory > 0:
                msg += " ({:.1f}% of allowed)".format(
                    total*100.0/cache_memory)
            logger.debug(msg)

            # Free oldest entries first until we are back under the target.
            while (total > self._target_usage * cache_memory
                   and len(q) > 0):
                t, info, cleanupFun = q.pop()
                mem = cleanupFun()
                logger.debug("Cleaned up {} ({})".format(
                    info, Memory.format(mem)))
                total -= mem
            gc.collect()
            # don't keep a reference until next loop iteration
            cleanupFun = None
            q = None

            msg = ("Done cleaning up, cache memory usage is now at "
                   "{}".format(Memory.format(total)))
            if cache_memory > 0:
                msg += " ({:.1f}% of allowed)".format(
                    total*100.0/cache_memory)
            logger.debug(msg)
        except Exception:
            # Best-effort cleanup: log and keep the manager alive.
            # NOTE(review): was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; the `msg +=` lines also carried
            # an inconsistent extra space of indentation, now normalized.
            log_exception(logger)
Ejemplo n.º 5
0
    def _cleanup(self):
        """
        Run a single cache-cleanup pass.

        Publishes the current total cache memory usage to subscribers via
        ``self.totalCacheMemory``.  If the total exceeds
        ``self._max_usage`` times the cache RAM budget, frees the least
        recently used caches/blocks until usage drops below
        ``self._target_usage`` times the budget.

        All errors are logged and suppressed: this runs periodically and
        must never kill the memory manager.
        """
        from lazyflow.operators.opCache import ObservableCache

        try:
            # notify subscribed functions about current cache memory
            total = 0

            # Avoid "RuntimeError: Set changed size during iteration"
            with self._first_class_caches_lock:
                first_class_caches = self._first_class_caches.copy()

            for cache in first_class_caches:
                if isinstance(cache, ObservableCache):
                    total += cache.usedMemory()
            self.totalCacheMemory(total)
            cache = None  # drop the loop reference so gc can reclaim the cache

            # check current memory state
            cache_memory = Memory.getAvailableRamCaches()
            cache_pct = 0.0
            if cache_memory:  # guard against division by zero
                cache_pct = total * 100.0 / cache_memory

            logger.debug(
                "Process memory usage is {:0.2f} GB out of {:0.2f} (caches are {}, {:.1f}% of allowed)".format(
                    Memory.getMemoryUsage() / 2.0 ** 30,
                    Memory.getAvailableRam() / 2.0 ** 30,
                    Memory.format(total),
                    cache_pct,
                )
            )

            if total <= self._max_usage * cache_memory:
                return

            # Collect (lastAccessTime, description, cleanup-callable) entries
            # for whole managed caches and for individual blocks of blocked
            # caches, then free oldest-first.
            cache_entries = []
            cache_entries += [
                (cache.lastAccessTime(), cache.name, cache.freeMemory) for cache in list(self._managed_caches)
            ]
            cache_entries += [
                (lastAccessTime, f"{cache.name}: {blockKey}", functools.partial(cache.freeBlock, blockKey))
                for cache in list(self._managed_blocked_caches)
                for blockKey, lastAccessTime in cache.getBlockAccessTimes()
            ]
            # Sort on the timestamp only: comparing whole tuples could try to
            # order the cleanup callables on ties, which is not supported.
            cache_entries.sort(key=lambda entry: entry[0])

            for lastAccessTime, info, cleanupFun in cache_entries:
                if total <= self._target_usage * cache_memory:
                    break
                mem = cleanupFun()
                logger.debug(f"Cleaned up {info} ({Memory.format(mem)})")
                total -= mem

            # Remove references to cache entries before triggering garbage collection.
            cleanupFun = None
            cache_entries = None
            gc.collect()

            msg = "Done cleaning up, cache memory usage is now at {}".format(Memory.format(total))
            if cache_memory > 0:
                msg += " ({:.1f}% of allowed)".format(total * 100.0 / cache_memory)
            logger.debug(msg)
        except Exception:
            # Best-effort cleanup: log and keep the manager alive.
            # NOTE(review): was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            log_exception(logger)