def __init__(self, path_to_binary, path_to_cache=None, save_every_n=1000, terminator="0"):
    """Wrap a binary system-under-learning, optionally backed by an on-disk query cache.

    :param path_to_binary: path to the executable being learned.
    :param path_to_cache: directory holding per-binary cache folders keyed by
        the binary's SHA-256 digest; ``None`` disables the on-disk cache.
    :param save_every_n: persist the cache to disk every n queries.
    :param terminator: input terminator symbol; must not occur in the
        binary's input alphabet.
    """
    self.path = path_to_binary
    self.needs_reset = True
    self.cache = {}
    self.error_cache = pygtrie.StringTrie(separator=" ")
    self.invalid_cache = pygtrie.PrefixSet()
    self.terminator = terminator
    assert terminator not in self.get_alphabet(), \
        f"Terminator {terminator} in alphabet, please choose a different one"

    # Save cache to file every n queries
    self.save_every_n = save_every_n
    self.n_queries = 0

    if path_to_cache is None:
        print("No cache path given, not using cache")
        self.cachepath = None
    else:
        print("Cache dir:", str(Path(path_to_cache).absolute()))
        # Hash the binary to find its cache folder.
        # ("digest" rather than "hash" — don't shadow the builtin.)
        with open(self.path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        # Check if cache exists for the given binary
        self.cachepath = Path(path_to_cache).joinpath(digest)
        if self.cachepath.is_dir():
            self._load_cache()
        else:
            # Create the whole directory tree: plain os.mkdir fails when the
            # cache root itself does not exist yet (and races on re-creation).
            self.cachepath.mkdir(parents=True, exist_ok=True)
def test_prefix_set(self):
    """PrefixSet test: minimal-prefix storage, iteration and point queries."""
    ps = pygtrie.PrefixSet(factory=self._TRIE_CLS)
    short_key = self.key_from_key(self._SHORT_KEY)
    long_key = self.key_from_key(self._LONG_KEY)
    very_long_key = self.key_from_key(self._VERY_LONG_KEY)
    other_key = self.key_from_key(self._OTHER_KEY)

    # Adding a key and then an extension of it leaves a single entry:
    # len() stays 1 and iteration yields only the long key.
    for key in (self._LONG_KEY, self._VERY_LONG_KEY):
        ps.add(key)
    self.assertEqual(1, len(ps))
    self.assertEqual([long_key], list(ps.iter()))
    self.assertEqual([long_key], list(iter(ps)))
    self.assertEqual([long_key], list(ps.iter(self._SHORT_KEY)))
    self.assertEqual([long_key], list(ps.iter(self._LONG_KEY)))
    self.assertEqual([very_long_key], list(ps.iter(self._VERY_LONG_KEY)))
    self.assertEqual([], list(ps.iter(self._OTHER_KEY)))

    # Adding a prefix of the stored key: the set still has one entry and
    # now iterates the shorter key instead.
    ps.add(self._SHORT_KEY)
    self.assertEqual(1, len(ps))
    self.assertEqual([short_key], list(ps.iter()))
    self.assertEqual([short_key], list(iter(ps)))
    self.assertEqual([short_key], list(ps.iter(self._SHORT_KEY)))
    self.assertEqual([long_key], list(ps.iter(self._LONG_KEY)))
    self.assertEqual([], list(ps.iter(self._OTHER_KEY)))

    # An unrelated key becomes a second independent entry.
    ps.add(self._OTHER_KEY)
    self.assertEqual(2, len(ps))
    self.assertEqual(sorted((short_key, other_key)), sorted(ps.iter()))
    self.assertEqual([short_key], list(ps.iter(self._SHORT_KEY)))
    self.assertEqual([long_key], list(ps.iter(self._LONG_KEY)))
    self.assertEqual([other_key], list(ps.iter(self._OTHER_KEY)))
def test_prefix_set_pickling_proto3(self):
    """Unpickling a stored pickle must reproduce the expected PrefixSet."""
    # Base64-encoded pre-generated pickle (protocol 3, per the test name)
    # of PrefixSet(('foo', 'bar', 'baz'), factory=CharTrie).
    # This blob must stay byte-exact.
    pickled = (
        'gANjcHlndHJpZQpQcmVmaXhTZXQKcQApgXEBfXECWAUAAABfdHJpZXEDY3B5Z3RyaW'
        'UKQ2hhclRyaWUKcQQpgXEFfXEGKFgFAAAAX3Jvb3RxB2NweWd0cmllCl9Ob2RlCnEI'
        'KYFxCV1xCihLA1gBAAAAZnELWAEAAABvcQxoDIhK/f///0sDWAEAAABicQ1YAQAAAG'
        'FxDlgBAAAAcnEPiEr/////SwFYAQAAAHpxEIhlYlgHAAAAX3NvcnRlZHERiXVic2Iu'
    )
    want = pygtrie.PrefixSet(('foo', 'bar', 'baz'),
                             factory=pygtrie.CharTrie)
    self.assertUnpickling(want, pickled)
def _test_prefix_set_copy_impl(self, make_copy):
    """Shared driver: a copy equals the source until the source diverges.

    :param make_copy: callable producing a copy of a PrefixSet.
    """
    original = pygtrie.PrefixSet(factory=self._TRIE_CTOR)
    original.add(self._SHORT_KEY)
    self.assertEqual(1, len(original))

    snapshot = make_copy(original)
    self.assertEqual(original, snapshot)

    # Mutating the source must not leak into the copy.
    original.add(self._OTHER_KEY)
    self.assertNotEqual(original, snapshot)
def __setstate__(self, state):
    """Fix unpickling: rebuild the logger and reload the serialized tree.

    ``state["_log"]`` carries only the log level; the logger object itself
    is recreated here under ``self.NAME``.
    """
    logger = logging.getLogger(self.NAME)
    logger.setLevel(state["_log"])
    self._log = logger

    self._asdf = None
    for attr in ("_meta", "_source", "_size", "_initial_version"):
        setattr(self, attr, state[attr])

    self._compression_prefixes = pygtrie.PrefixSet(self.NO_COMPRESSION)
    self._load_tree(state["tree"])
def test_prefix_set_pickling_proto0(self):
    """Unpickling a stored pickle must reproduce the expected PrefixSet."""
    # Base64-encoded pre-generated pickle (protocol 0, per the test name)
    # of PrefixSet(('foo', 'bar', 'baz'), factory=CharTrie).
    # This blob must stay byte-exact.
    pickled = (
        'Y2NvcHlfcmVnCl9yZWNvbnN0cnVjdG9yCnAwCihjcHlndHJpZQpQcmVmaXhTZXQKcD'
        'EKY19fYnVpbHRpbl9fCm9iamVjdApwMgpOdHAzClJwNAooZHA1ClZfdHJpZQpwNgpn'
        'MAooY3B5Z3RyaWUKQ2hhclRyaWUKcDcKZzIKTnRwOApScDkKKGRwMTAKVl9yb290Cn'
        'AxMQpnMAooY3B5Z3RyaWUKX05vZGUKcDEyCmcyCk50cDEzClJwMTQKKGxwMTUKTDNM'
        'CmFWZgpwMTYKYVZvCnAxNwphZzE3CmFJMDEKYUwtM0wKYUwzTAphVmIKcDE4CmFWYQ'
        'pwMTkKYVZyCnAyMAphSTAxCmFMLTFMCmFMMUwKYVZ6CnAyMQphSTAxCmFic1Zfc29y'
        'dGVkCnAyMgpJMDAKc2JzYi4=')
    want = pygtrie.PrefixSet(('foo', 'bar', 'baz'),
                             factory=pygtrie.CharTrie)
    self.assertUnpickling(want, pickled)
def __init__(self, path_to_binary):
    """Launch the binary as a subprocess and start a stdout reader thread.

    :param path_to_binary: path to the executable to run.
    """
    self.path = path_to_binary
    self.needs_reset = True

    # Local caches — consulted only when no external RERS cache is hooked up.
    self.separator = " "
    self.cache = {}
    self.error_cache = pygtrie.StringTrie(separator=self.separator)
    self.invalid_cache = pygtrie.PrefixSet()

    # Spawn the binary unbuffered with stderr folded into stdout, then
    # drain its output into a queue from a background daemon thread.
    self.proc = Popen(path_to_binary,
                      bufsize=0,
                      stdin=PIPE,
                      stdout=PIPE,
                      stderr=STDOUT)
    self.q = Queue()
    self.t = Thread(target=self._enqueue,
                    args=(self.proc.stdout, self.q),
                    daemon=True)
    self.t.start()
def __init__(self, **kwargs):
    """
    Initialize a new Model instance.

    :param kwargs: Everything is ignored except ``log_level``.
    """
    # Subclasses must have filled in the identifying class attributes.
    for required in (self.NAME, self.VENDOR, self.DESCRIPTION):
        assert required is not None

    self._log = logging.getLogger(self.NAME)
    self._log.setLevel(kwargs.get("log_level", logging.DEBUG))

    self._source = None
    self._asdf = None
    self._size = 0
    self._initial_version = None
    self._meta = generate_new_meta(self.NAME, self.DESCRIPTION,
                                   self.VENDOR, self.LICENSE)

    assert isinstance(self.NO_COMPRESSION, tuple), "NO_COMPRESSION must be a tuple"
    self._compression_prefixes = pygtrie.PrefixSet(self.NO_COMPRESSION)
def __init__(self, path_to_binary, path_to_cache=None, save_every_n=100000):
    """Launch the binary as a subprocess, optionally backed by an on-disk cache.

    :param path_to_binary: path to the executable to run and learn.
    :param path_to_cache: directory holding per-binary cache folders keyed by
        the binary's SHA-256 digest; ``None`` disables the on-disk cache.
    :param save_every_n: persist the cache to disk every n queries.
    """
    self.path = path_to_binary
    self.needs_reset = True
    self.cache = {}
    self.error_cache = pygtrie.StringTrie(separator=" ")
    self.invalid_cache = pygtrie.PrefixSet()

    # Set up external process and communication
    self.proc = Popen(path_to_binary, bufsize=0, stdout=PIPE, stdin=PIPE,
                      stderr=STDOUT)
    self.q = Queue()
    self.t = Thread(target=self._enqueue, args=(self.proc.stdout, self.q))
    self.t.daemon = True
    self.t.start()

    # Save cache to file every n queries
    self.save_every_n = save_every_n
    self.n_queries = 0

    if path_to_cache is None:
        print("No cache path given, not using cache")
        self.cachepath = None
    else:
        print("Cache dir:", str(Path(path_to_cache).absolute()))
        # Hash the binary to find its cache folder.
        # ("digest" rather than "hash" — don't shadow the builtin.)
        with open(self.path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        # Check if cache exists for the given binary
        self.cachepath = Path(path_to_cache).joinpath(digest)
        if self.cachepath.is_dir():
            self._load_cache()
        else:
            # cachepath is already a Path — no need to re-wrap it.
            self.cachepath.mkdir(parents=True, exist_ok=True)
def copy_include_dirs(tile):
    """Copy all include directories that this tile defines as products into
    build/output/include.

    :param tile: tile whose ``products`` settings may list
        ``include_directories``; if the tile defines no products, nothing
        is copied.
    """
    if 'products' not in tile.settings:
        return

    incdirs = tile.settings['products'].get('include_directories', [])
    # Normalize, then sort shortest-first so parent directories are copied
    # before their subdirectories and the prefix check below can skip the
    # subdirectories entirely.
    incdirs = sorted(
        (os.path.normpath(utilities.join_path(x)) for x in incdirs),
        key=len)

    seen_dirs = pygtrie.PrefixSet(
        factory=lambda: pygtrie.StringTrie(separator=os.path.sep))

    env = Environment(tools=[])

    # all include directories are relative to the firmware/src directory
    outputbase = os.path.join('build', 'output', 'include')
    inputbase = os.path.join('firmware', 'src')

    for inc in incdirs:
        if inc in seen_dirs:
            # Already covered by a previously copied parent directory.
            continue

        relinput = os.path.join(inputbase, inc)
        finaldir = os.path.join(outputbase, inc)

        for folder, subdirs, filenames in os.walk(relinput):
            relfolder = os.path.relpath(folder, relinput)
            for filename in filenames:
                if filename.endswith(".h"):
                    infile = os.path.join(folder, filename)
                    outfile = os.path.join(finaldir, relfolder, filename)
                    env.Command([outfile], [infile],
                                Copy("$TARGET", "$SOURCE"))

        seen_dirs.add(inc)
def build_pygtrie(geohash_list):
    """Build a GTrie prefix set from a list of geohashes.

    :param geohash_list: iterable of geohash strings.
    :return: GTrie wrapping a pygtrie.PrefixSet of the hashes.
    """
    # PrefixSet's constructor already consumes the iterable; the previous
    # version then re-added every element in a loop (with a loop variable
    # shadowing the builtin ``hash``), performing every insertion twice.
    return GTrie(pygtrie.PrefixSet(geohash_list))
tty.setraw(0) return sys.stdin.read(1) finally: termios.tcsetattr(0, termios.TCSADRAIN, attr) except ImportError: try: from msvcrt import getch # pylint: disable=import-error except ImportError: sys.exit(0) print('\nPrefix set') print('==========\n') ps = pygtrie.PrefixSet(factory=pygtrie.StringTrie) ps.add('/etc/rc.d') ps.add('/usr/local/share') ps.add('/usr/local/lib') ps.add('/usr') # Will handle the above two as well ps.add('/usr/lib') # Does not change anything print('Path prefixes:', ', '.join(iter(ps))) for path in ('/etc', '/etc/rc.d', '/usr', '/usr/local', '/usr/local/lib'): print('Is', path, 'in the set:', ('yes' if path in ps else 'no')) print('\nDictionary test') print('===============\n')
def make_module_matcher(modules: Iterable[str]) -> pygtrie.PrefixSet:
    """Build a prefix set over dotted module paths.

    Entries are keyed component-wise on the "." separator, so lookups
    operate on module-path components rather than raw characters.
    """
    trie_factory = pygtrie.StringTrie
    return pygtrie.PrefixSet(iterable=modules,
                             factory=trie_factory,
                             separator=".")
def check(iterable):
    # Closure over the enclosing test case: a PrefixSet constructed from
    # ``iterable`` must contain exactly one entry — the short key.
    ps = pygtrie.PrefixSet(iterable, factory=self._TRIE_CTOR)
    self.assertEqual(1, len(ps))
    self.assertEqual([self.key_from_key(self._SHORT_KEY)], list(ps))