Example No. 1
0
 def output_tester(self):
     recordlog = treelog.RecordLog(simplify=True)
     yield recordlog
     self.assertEqual(
         recordlog._messages,
         [('write', 'my message', treelog.proto.Level.user),
          ('open', 0, 'test.dat', 'w', treelog.proto.Level.info),
          ('close', 0, 'test1'), ('pushcontext', 'my context'),
          ('pushcontext', 'iter 1'),
          ('write', 'a', treelog.proto.Level.info), ('recontext', 'iter 2'),
          ('write', 'b', treelog.proto.Level.info), ('recontext', 'iter 3'),
          ('write', 'c', treelog.proto.Level.info), ('popcontext', ),
          ('write', 'multiple..\n  ..lines', treelog.proto.Level.error),
          ('open', 1, 'test.dat', 'wb', treelog.proto.Level.user),
          ('write', 'generating', treelog.proto.Level.info),
          ('close', 1, b'test2'), ('recontext', 'generate_test'),
          ('open', 2, 'test.dat', 'wb', treelog.proto.Level.warning),
          ('close', 2, b'test3'), ('recontext', 'context step=0'),
          ('write', 'foo', treelog.proto.Level.info),
          ('recontext', 'context step=1'),
          ('write', 'bar', treelog.proto.Level.info), ('popcontext', ),
          ('open', 3, 'same.dat', 'wb', treelog.proto.Level.error),
          ('close', 3, b'test3'),
          ('open', 4, 'dbg.dat', 'wb', treelog.proto.Level.debug),
          ('close', 4, b'test4'),
          ('write', 'dbg', treelog.proto.Level.debug),
          ('write', 'warn', treelog.proto.Level.warning)])
     for Log in StdoutLog, DataLog, HtmlLog:
         with self.subTest('replay to {}'.format(
                 Log.__name__)), Log.output_tester(self) as log:
             recordlog.replay(log)
Example No. 2
0
 def wrapper(*args, **kwargs):
     """Disk-memoizing wrapper around ``func``.

     When no cache directory is configured (``_cache.value is None``) the
     wrapped function is called directly.  Otherwise the canonicalized
     arguments are hashed into a cache key; the matching cache file is
     created and locked, and either the previously stored ``(value, log)``
     pair is replayed and returned, or ``func`` is evaluated and its result
     and captured log are stored for future calls.
     """
     if _cache.value is None:
         return func(*args, **kwargs)
     args, kwargs = canonicalize(*args, **kwargs)
     # Hash the function key and the canonicalized arguments and compute the
     # hexdigest.  This is used to identify cache file `cachefile`.  Keyword
     # arguments are folded in order-independently by sorting their digests.
     h = hashlib.sha1(func_key)
     for arg in args:
         h.update(types.nutils_hash(arg))
     for hkv in sorted(
             hashlib.sha1(k.encode()).digest() + types.nutils_hash(v)
             for k, v in kwargs.items()):
         h.update(hkv)
     hkey = h.hexdigest()
     cachefile = _cache.value / hkey
     # Open and lock `cachefile`.  Try to read it and, if successful, unlock
     # the file (implicitly by closing the file) and return the value.  If
     # reading fails, e.g. because the file did not exist, call `func`, store
     # the result, unlock and return.  While not necessary per se, we lock the
     # file immediately to avoid checking twice if there is a cached value: once
     # before locking the file, and once after locking, at which point another
     # party may have written something to the cache already.
     cachefile.parent.mkdir(parents=True, exist_ok=True)
     # Ensure the file exists so it can be opened in 'r+b' mode and locked.
     cachefile.touch()
     with cachefile.open('r+b') as f:
         log.debug('[cache.function {}] acquiring lock'.format(hkey))
         _lock_file(f)
         log.debug('[cache.function {}] lock acquired'.format(hkey))
         try:
             # NOTE(review): pickle.load is only safe because the cache
             # directory is assumed to be trusted, locally-written data.
             data = pickle.load(f)
             if len(data) == 3:  # For old caches.
                 log_, fail, value = data
                 if fail:
                     raise pickle.UnpicklingError
             else:
                 value, log_ = data
         except (EOFError, pickle.UnpicklingError, IndexError):
             # Empty or unreadable entry: fall through to recompute below.
             # (Removed a redundant `pass` that followed this debug call.)
             log.debug(
                 '[cache.function {}] failed to load, cache will be rewritten'
                 .format(hkey))
         else:
             log.debug('[cache.function {}] load'.format(hkey))
             log_.replay()
             return value
         # Seek back to the beginning, because pickle might have read garbage.
         f.seek(0)
         # Disable the cache temporarily to prevent caching subresults *in* `func`.
         log_ = log.RecordLog()
         with disable(), log.add(log_):
             value = func(*args, **kwargs)
         pickle.dump((value, log_), f)
         log.debug('[cache.function {}] store'.format(hkey))
         return value
Example No. 3
0
 def output_tester(self):
     recordlog = treelog.RecordLog()
     yield treelog.FilterLog(recordlog, minlevel=treelog.proto.Level.user)
     self.assertEqual(
         recordlog._messages,
         [('write', 'my message', treelog.proto.Level.user),
          ('pushcontext', 'my context'),
          ('write', 'multiple..\n  ..lines', treelog.proto.Level.error),
          ('open', 0, 'test.dat', 'wb', treelog.proto.Level.user),
          ('close', 0, b'test2'), ('recontext', 'generate_test'),
          ('open', 1, 'test.dat', 'wb', treelog.proto.Level.warning),
          ('close', 1, b'test3'), ('popcontext', ),
          ('open', 2, 'same.dat', 'wb', treelog.proto.Level.error),
          ('close', 2, b'test3'),
          ('write', 'warn', treelog.proto.Level.warning)])
Example No. 4
0
 def output_tester(self):
     recordlog = treelog.RecordLog()
     yield treelog.FilterLog(recordlog, maxlevel=treelog.proto.Level.user)
     self.assertEqual(
         recordlog._messages,
         [('write', 'my message', treelog.proto.Level.user),
          ('open', 0, 'test.dat', 'w', treelog.proto.Level.info),
          ('close', 0, 'test1'), ('pushcontext', 'my context'),
          ('pushcontext', 'iter 1'),
          ('write', 'a', treelog.proto.Level.info), ('recontext', 'iter 2'),
          ('write', 'b', treelog.proto.Level.info), ('recontext', 'iter 3'),
          ('write', 'c', treelog.proto.Level.info), ('popcontext', ),
          ('open', 1, 'test.dat', 'wb', treelog.proto.Level.user),
          ('write', 'generating', treelog.proto.Level.info),
          ('close', 1, b'test2'), ('recontext', 'context step=0'),
          ('write', 'foo', treelog.proto.Level.info),
          ('recontext', 'context step=1'),
          ('write', 'bar', treelog.proto.Level.info), ('popcontext', ),
          ('open', 2, 'dbg.dat', 'wb', treelog.proto.Level.debug),
          ('close', 2, b'test4'),
          ('write', 'dbg', treelog.proto.Level.debug)])
Example No. 5
0
 def __iter__(self):
     length = type(self).length
     if _cache.value is None:
         yield from self.resume_index([], 0)
     else:
         # The hash of `types.Immutable` uniquely defines this `Recursion`, so use
         # this to identify the cache directory.  All iterations are stored as
         # separate files, numbered '0000', '0001', ..., in this directory.
         hkey = self.__nutils_hash__.hex()
         cachepath = _cache.value / hkey
         cachepath.mkdir(exist_ok=True, parents=True)
         log.debug('[cache.Recursion {}] start iterating'.format(hkey))
         # The `history` variable is updated while reading from the cache and
         # truncated to the required length.
         history = []
         # The `exhausted` variable controls if we are reading items from the
         # cache (`False`) or we are computing values and writing to the cache.
         # Once `exhausted` is `True` we keep it there, even if at some point
         # there are cached items available.
         exhausted = False
         # The `stop` variable indicates if an exception is raised in `resume`.
         stop = False
         for i in itertools.count():
             cachefile = cachepath / '{:04d}'.format(i)
             cachefile.touch()
             with cachefile.open('r+b') as f:
                 log.debug(
                     '[cache.Recursion {}.{:04d}] acquiring lock'.format(
                         hkey, i))
                 _lock_file(f)
                 log.debug(
                     '[cache.Recursion {}.{:04d}] lock acquired'.format(
                         hkey, i))
                 if not exhausted:
                     try:
                         log_, stop, value = pickle.load(f)
                     except (pickle.UnpicklingError, IndexError):
                         log.debug(
                             '[cache.Recursion {}.{:04d}] failed to load, cache will be rewritten from this point'
                             .format(hkey, i))
                         exhausted = True
                     except EOFError:
                         log.debug(
                             '[cache.Recursion {}.{:04d}] cache exhausted'.
                             format(hkey, i))
                         exhausted = True
                     else:
                         log.debug(
                             '[cache.Recursion {}.{:04d}] load'.format(
                                 hkey, i))
                         log_.replay()
                         if stop and value is None:
                             value = StopIteration
                         history.append(value)
                         if len(history) > length:
                             history = history[1:]
                     if exhausted:
                         resume = self.resume_index(history, i)
                         f.seek(0)
                         del history
                 if exhausted:
                     # Disable the cache temporarily to prevent caching subresults *in* `func`.
                     log_ = log.RecordLog()
                     with disable(), log.add(log_):
                         try:
                             value = next(resume)
                         except Exception as e:
                             stop = True
                             value = e
                     log.debug('[cache.Recursion {}.{}] store'.format(
                         hkey, i))
                     pickle.dump((log_, stop, value), f)
             if not stop:
                 yield value
             elif isinstance(value, StopIteration):
                 return
             else:
                 raise value
Example No. 6
0
 def setUp(self):
     """Install a fresh, non-simplifying RecordLog as the active treelog
     logger, keeping a reference to the previous one for later restoration."""
     # Remember the currently active logger first so it can be restored.
     self.previous = treelog.current
     # Route all subsequent log calls into an in-memory record.
     self.recordlog = treelog.RecordLog(simplify=False)
     treelog.current = self.recordlog
Example No. 7
0
 def test_replay_in_current(self):
     """Replaying a RecordLog with no explicit target must deliver the
     recorded messages to the globally current logger."""
     recorded = treelog.RecordLog()
     recorded.write('test', level=treelog.proto.Level.info)
     # With a LoggingLog installed as the current logger, the replayed
     # message must surface via the 'nutils' logging channel and nothing
     # may be printed to stdout.
     with self.assertSilent():
         with treelog.set(treelog.LoggingLog()):
             with self.assertLogs('nutils'):
                 recorded.replay()