def cached_solver(solver, neuron_type, gain, bias, x, targets, rng=None, E=None):
    """Cache-aware wrapper around ``solver_fn``.

    Resolves default ``rng``/``E`` values from the wrapped solver's own
    signature (so the cache key is stable regardless of how the solver
    was invoked), looks the decoders up in the cache index, and falls
    back to running the solver — storing the result unless the cache is
    read-only — on a miss or an unreadable entry.

    Returns
    -------
    (decoders, solver_info) : tuple
        Same return convention as ``solver_fn``.
    """
    try:
        args, _, _, defaults = inspect.getargspec(solver)
    except TypeError:
        # Callable objects expose their signature through __call__.
        args, _, _, defaults = inspect.getargspec(solver.__call__)
    args = args[-len(defaults):]
    if rng is None and 'rng' in args:
        rng = defaults[args.index('rng')]
    if E is None and 'E' in args:
        E = defaults[args.index('E')]
    key = self._get_cache_key(
        solver_fn, solver, neuron_type, gain, bias, x, targets, rng, E)
    try:
        # The index maps key -> (file path, start offset, end offset).
        path, start, end = self._index[key]
        if self._fd is not None:
            # Flush pending writes so a just-stored entry is readable.
            self._fd.flush()
        with open(path, 'rb') as f:
            f.seek(start)
            solver_info, decoders = nco.read(f)
    except Exception:  # was a bare ``except:``; don't swallow SystemExit etc.
        logger.debug("Cache miss [%s].", key)
        decoders, solver_info = solver_fn(
            solver, neuron_type, gain, bias, x, targets, rng=rng, E=E)
        if not self.readonly:
            # Append the fresh result and record its byte range.
            fd = self._get_fd()
            start = fd.tell()
            nco.write(fd, solver_info, decoders)
            end = fd.tell()
            self._index[key] = (fd.name, start, end)
    else:
        logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, solver_info
def cached_solver(solver, neuron_type, gain, bias, x, targets, rng=None, E=None):
    """Cache-aware wrapper around ``solver_fn`` using one file per key.

    Defaults for ``rng`` and ``E`` are taken from the wrapped solver's
    signature so the cache key does not depend on whether the caller
    passed them explicitly. On a miss or an unreadable cache file, the
    solver is run and (unless read-only) its result is written back.

    Returns
    -------
    (decoders, solver_info) : tuple
        Same return convention as ``solver_fn``.
    """
    try:
        args, _, _, defaults = inspect.getargspec(solver)
    except TypeError:
        # Callable objects expose their signature through __call__.
        args, _, _, defaults = inspect.getargspec(solver.__call__)
    args = args[-len(defaults):]
    if rng is None and 'rng' in args:
        rng = defaults[args.index('rng')]
    if E is None and 'E' in args:
        E = defaults[args.index('E')]
    key = self._get_cache_key(
        solver_fn, solver, neuron_type, gain, bias, x, targets, rng, E)
    path = self._key2path(key)
    try:
        with open(path, 'rb') as f:
            solver_info, decoders = nco.read(f)
    except Exception:  # was a bare ``except:``; don't swallow SystemExit etc.
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Cache miss [%s].", key)
        decoders, solver_info = solver_fn(
            solver, neuron_type, gain, bias, x, targets, rng=rng, E=E)
        if not self.read_only:
            with open(path, 'wb') as f:
                nco.write(f, solver_info, decoders)
    else:
        logger.info("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, solver_info
def cached_solver(conn, gain, bias, x, targets, rng=np.random,
                  **uncached_kwargs):
    """Solve for decoders, consulting the on-disk cache first.

    Falls through to the plain solver when used outside a ``with cache``
    block or when the arguments cannot be fingerprinted; otherwise reads
    the cached entry, recomputing (and storing, unless read-only) on a
    miss or a corrupted entry.
    """
    def solve():
        # Run the wrapped solver with exactly the arguments we received.
        return solver_fn(conn, gain, bias, x, targets, rng=rng,
                         **uncached_kwargs)

    if not self._in_context:
        warnings.warn("Cannot use cached solver outside of "
                      "`with cache` block.")
        return solve()

    try:
        key = self._get_cache_key(conn.solver, conn.pre_obj.neuron_type,
                                  gain, bias, x, targets, rng)
    except FingerprintError as exc:
        logger.debug("Failed to generate cache key: %s", exc)
        return solve()

    try:
        path, start, end = self._index[key]
        if self._fd is not None:
            self._fd.flush()
        with open(path, 'rb') as cache_file:
            cache_file.seek(start)
            info, decoders = nco.read(cache_file)
    except Exception as exc:
        # A KeyError is an ordinary miss; anything else means the stored
        # entry could not be read back and is logged with its traceback.
        if isinstance(exc, KeyError):
            logger.debug("Cache miss [%s].", key)
        else:
            logger.exception("Corrupted cache entry [%s].", key)
        decoders, info = solve()
        if not self.readonly:
            cache_fd = self._get_fd()
            start = cache_fd.tell()
            nco.write(cache_fd, info, decoders)
            self._index[key] = (cache_fd.name, start, cache_fd.tell())
    else:
        logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, info
def cached_solver(solver, neuron_type, gain, bias, x, targets, rng=None, E=None):
    """Cache-aware wrapper around ``solver_fn`` using one file per key.

    Defaults for ``rng`` and ``E`` come from the wrapped solver's
    signature so the cache key is independent of call style. On a miss
    or unreadable cache file the solver is re-run and, unless the cache
    is read-only, the result is written back to ``path``.

    Returns
    -------
    (decoders, solver_info) : tuple
        Same return convention as ``solver_fn``.
    """
    try:
        args, _, _, defaults = inspect.getargspec(solver)
    except TypeError:
        # Callable objects expose their signature through __call__.
        args, _, _, defaults = inspect.getargspec(solver.__call__)
    args = args[-len(defaults):]
    if rng is None and 'rng' in args:
        rng = defaults[args.index('rng')]
    if E is None and 'E' in args:
        E = defaults[args.index('E')]
    key = self._get_cache_key(
        solver_fn, solver, neuron_type, gain, bias, x, targets, rng, E)
    path = self._key2path(key)
    try:
        with open(path, 'rb') as f:
            solver_info, decoders = nco.read(f)
    except Exception:  # was a bare ``except:``; don't swallow SystemExit etc.
        # Lazy %-style args avoid string formatting when DEBUG is disabled.
        logger.debug("Cache miss [%s].", key)
        decoders, solver_info = solver_fn(
            solver, neuron_type, gain, bias, x, targets, rng=rng, E=E)
        if not self.read_only:
            with open(path, 'wb') as f:
                nco.write(f, solver_info, decoders)
    else:
        logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, solver_info
def test_read_errors(self, tmpdir):
    """``nco.read`` rejects a bad magic string and an unknown version."""
    cases = [
        ("bad_magic_cache_file.txt", {"magic_string": "BAD"},
         "Not a Nengo cache object file"),
        ("bad_version_cache_file.txt", {"version": 255},
         "NCO protocol version 255 is"),
    ]
    for fname, header_kwargs, err_match in cases:
        filepath = str(tmpdir.join(fname))
        with open(filepath, "wb") as fh:
            write_custom_nco_header(fh, **header_kwargs)
        with open(filepath, "rb") as fh:
            with pytest.raises(CacheIOError, match=err_match):
                nco.read(fh)
def cached_solver(conn, gain, bias, x, targets, rng=None, E=None,
                  **uncached_kwargs):
    """Cache-aware replacement for ``solver_fn``.

    Looks the decoders up in the cache index; on a miss (or a corrupted
    entry) it runs the wrapped solver and, unless the cache is
    read-only, appends the result to the cache file and records its
    byte range in the index.

    Returns
    -------
    (decoders, info) : tuple
        Same return convention as ``solver_fn``.
    """
    # Outside a ``with cache`` block there is no open index/file to use,
    # so warn and fall through to the uncached solver.
    if not self._in_context:
        warnings.warn("Cannot use cached solver outside of "
                      "`with cache` block.")
        return solver_fn(conn, gain, bias, x, targets, rng=rng, E=E,
                         **uncached_kwargs)
    # Pull default values of ``rng``/``E`` from the solver's signature so
    # the cache key does not depend on whether the caller passed them
    # explicitly.  Callable objects expose their signature on __call__.
    try:
        args, _, _, defaults = inspect.getargspec(conn.solver)
    except TypeError:
        args, _, _, defaults = inspect.getargspec(conn.solver.__call__)
    args = args[-len(defaults):]
    if rng is None and 'rng' in args:
        rng = defaults[args.index('rng')]
    if E is None and 'E' in args:
        E = defaults[args.index('E')]
    # Arguments that cannot be fingerprinted make the key unusable;
    # skip the cache entirely in that case.
    try:
        key = self._get_cache_key(conn.solver, conn.pre_obj.neuron_type,
                                  gain, bias, x, targets, rng, E)
    except FingerprintError as e:
        logger.debug("Failed to generate cache key: %s", e)
        return solver_fn(conn, gain, bias, x, targets, rng=rng, E=E,
                         **uncached_kwargs)
    try:
        # The index maps key -> (file path, start offset, end offset).
        path, start, end = self._index[key]
        if self._fd is not None:
            # Flush pending writes so a just-stored entry is readable.
            self._fd.flush()
        with open(path, 'rb') as f:
            f.seek(start)
            info, decoders = nco.read(f)
    except Exception as err:
        # KeyError means a plain cache miss; anything else indicates a
        # corrupted entry, which is logged with its traceback.
        if isinstance(err, KeyError):
            logger.debug("Cache miss [%s].", key)
        else:
            logger.exception("Corrupted cache entry [%s].", key)
        decoders, info = solver_fn(conn, gain, bias, x, targets, rng=rng,
                                   E=E, **uncached_kwargs)
        if not self.readonly:
            # Append the fresh result to the cache file and remember its
            # byte range so it can be sliced back out later.
            fd = self._get_fd()
            start = fd.tell()
            nco.write(fd, info, decoders)
            end = fd.tell()
            self._index[key] = (fd.name, start, end)
    else:
        logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, info
def test_nco_roundtrip(tmpdir):
    """Data written with ``nco.write`` is recovered intact by ``nco.read``."""
    nco_file = tmpdir.join("test.nco")
    original_pickle = {"0": 237, "str": "foobar"}
    original_array = np.array([[4, 3], [2, 1]])

    with nco_file.open("wb") as fh:
        nco.write(fh, original_pickle, original_array)
    with nco_file.open("rb") as fh:
        restored_pickle, restored_array = nco.read(fh)

    assert original_pickle == restored_pickle
    assert_equal(original_array, restored_array)
def test_nco_roundtrip(tmpdir):
    """Round-trip a pickle payload plus an array through the NCO format."""
    target = tmpdir.join('test.nco')
    payload = {'0': 237, 'str': 'foobar'}
    matrix = np.array([[4, 3], [2, 1]])

    with target.open('wb') as stream:
        nco.write(stream, payload, matrix)
    with target.open('rb') as stream:
        payload_back, matrix_back = nco.read(stream)

    assert payload == payload_back
    assert_equal(matrix, matrix_back)
def cached_solver(solver, neuron_type, gain, bias, x, targets, rng=None, E=None):
    """Cache-aware wrapper around ``solver_fn`` backed by an offset index.

    Resolves default ``rng``/``E`` from the wrapped solver's signature,
    then tries to slice a stored entry out of the shared cache file; on
    a miss or unreadable entry the solver is re-run and its result is
    appended (unless the cache is read-only).

    Returns
    -------
    (decoders, solver_info) : tuple
        Same return convention as ``solver_fn``.
    """
    try:
        args, _, _, defaults = inspect.getargspec(solver)
    except TypeError:
        # Callable objects expose their signature through __call__.
        args, _, _, defaults = inspect.getargspec(solver.__call__)
    args = args[-len(defaults):]
    if rng is None and 'rng' in args:
        rng = defaults[args.index('rng')]
    if E is None and 'E' in args:
        E = defaults[args.index('E')]
    key = self._get_cache_key(solver_fn, solver, neuron_type, gain, bias,
                              x, targets, rng, E)
    try:
        # The index maps key -> (file path, start offset, end offset).
        path, start, end = self._index[key]
        if self._fd is not None:
            # Flush pending writes so a just-stored entry is readable.
            self._fd.flush()
        with open(path, 'rb') as f:
            f.seek(start)
            solver_info, decoders = nco.read(f)
    except Exception:  # was a bare ``except:``; don't swallow SystemExit etc.
        logger.debug("Cache miss [%s].", key)
        decoders, solver_info = solver_fn(solver, neuron_type, gain, bias,
                                          x, targets, rng=rng, E=E)
        if not self.readonly:
            fd = self._get_fd()
            start = fd.tell()
            nco.write(fd, solver_info, decoders)
            end = fd.tell()
            self._index[key] = (fd.name, start, end)
    else:
        logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
    return decoders, solver_info