def html_content(self):
    cache = Cache(
        self._get_cache_directory(),
        eviction_policy="least-frequently-used",
    )
    cache_key = self.uri.to_uri_string()
    if cached_html_content := cache.get(cache_key):
        return cached_html_content.decode("utf-8")
class FactBaseStats:
    def __init__(self, generator):
        self._cache = Cache(
            os.path.join(os.path.dirname(__file__), 'stats',
                         self.__class__.__name__, generator),
            size_limit=int(2e9))
        self._dataset = None
        self._table = None

    def init(self, dataset, table):
        self._dataset = dataset
        self._table = table
        self._cache.set((self._dataset, self._table),
                        {'exact': 0, 'strict': 0, 'loose': 0, 'none': 0})

    def _incr(self, field):
        entry = self._cache.get((self._dataset, self._table))
        entry[field] += 1
        self._cache.set((self._dataset, self._table), entry)

    def incr_exact(self):
        self._incr('exact')

    def incr_strict(self):
        self._incr('strict')

    def incr_loose(self):
        self._incr('loose')

    def incr_empty(self):
        self._incr('none')

    def get_dataset_stats(self, dataset):
        stats_keys = ['exact', 'strict', 'loose', 'none']
        stats = np.zeros(4, dtype=int)
        tables = 0
        for key in self._cache.iterkeys():
            if key[0] == dataset:
                tables += 1
                entry = self._cache.get(key)
                stats[0] += entry['exact']
                stats[1] += entry['strict']
                stats[2] += entry['loose']
                stats[3] += entry['none']
        cells = sum(stats)
        print('Dataset:', dataset)
        print('Tables:', tables)
        print('Cells:', cells)
        print('Stats:', dict(zip(stats_keys, zip(stats, np.round(stats / cells, 4)))))
class CachedClient(Client):
    def __init__(self, apikey, agent="unknown", host=None, cache_dir=None):
        super().__init__(apikey, agent=agent, host=host)
        self.cache = Cache(cache_dir, disk=VtCache, disk_compress_level=6,
                           tag_index=True)
        self.cache_dir = cache_dir
        self.logger = logging.getLogger('kfinny.cachedvt.CachedClient')

    def _get(self, resource):
        data, tag = self.cache.get(resource, tag=True)
        if data and tag in ['sha1', 'md5']:
            data, tag = self.cache.get(data, tag=True)
        if data and tag == 'object':
            data = Object.from_dict(data)
        return data, tag

    def _put_object(self, obj):
        self.cache.set(obj.sha256, obj.to_dict(), tag='object')
        self.cache.set(obj.sha1, obj.sha256, tag='sha1')
        self.cache.set(obj.md5, obj.sha256, tag='md5')

    def _put_error(self, resource, error):
        self.cache.set(resource, {
            'resource': resource,
            'code': error.code,
            'message': error.message
        }, tag='error')

    def yield_file_report(self, resource, include_notfound=False):
        queryset = set()
        if isinstance(resource, str):
            resource = resource.split(',')
        if isinstance(resource, (tuple, list, set, frozenset)):
            for r in resource:
                data, tag = self._get(r)
                if data is not None:
                    if tag == 'object' or include_notfound:
                        yield data
                else:
                    queryset.add(r)
        resource = sorted(queryset)
        for i in resource:
            try:
                obj = self.get_object(f'/files/{i}')
                self._put_object(obj)
                yield obj
            except APIError as e:
                self._put_error(i, e)
        self.logger.debug("hits = {}, misses = {}".format(*self.cache.stats()))
class localCache(object):
    def __init__(self, config):
        self.cache_file = config.ad_cache_file

    def __enter__(self):
        self.cache = Cache(self.cache_file)
        self.cache.expire()
        return self

    def __exit__(self, exctype, exception, traceback):
        self.cache.close()

    def correct_ldap_group_list(self, group_list):
        # Drop groups that were just deleted (and are still present in the
        # LDAP reply).
        deleted_groups = list()
        if len(self.cache) > 0:
            for group in group_list:
                if group.get("name") in self.cache and self.cache.get(
                        group.get("name")).get("cache_state") == "deleted":
                    log.info('Group {0} in state "deleted" found in cache'.format(
                        group.get("name")))
                    deleted_groups.append(group)
        corrected_group_list = [
            x for x in group_list if x not in deleted_groups
        ]
        # Add groups that were just created (and are not yet visible in the
        # LDAP reply).
        created_groups = list()
        groups_name_list = [group.get("name") for group in group_list]
        if len(self.cache) > 0:
            cached = self.cache._sql('SELECT key FROM Cache').fetchall()
            for group in cached:
                if self.cache.get(group[0]).get("name") not in groups_name_list and \
                        self.cache.get(group[0]).get("cache_state") == "created":
                    log.info('Group {0} in state "created" found in cache'.format(
                        group[0]))
                    created_groups.append(self.cache.get(group[0]))
        # Compare by group name here: created_groups holds group dicts, so a
        # direct membership test against the name list would never match.
        corrected_group_list.extend(
            [x for x in created_groups if x.get("name") not in groups_name_list])
        return corrected_group_list
class Cache(object):
    def __init__(self):
        from diskcache import Cache
        self.cache = Cache('/tmp/navan')
        self.cache.stats(enable=True)

    def get(self, *args):
        return self.cache.get(':'.join(args))

    def set(self, *args, **kwargs):
        expire = kwargs.get('expire')
        if len(args) < 2:
            raise Exception('cache set must contain `key` and `value`')
        key, value = args[:-1], args[-1]
        key = ':'.join(key)
        return self.cache.set(key, value, expire)

    def get_json(self, *args):
        ret = self.get(*args)
        if not ret:
            return ret
        return json.loads(ret)

    def set_json(self, *args, **kwargs):
        args = list(args)
        args[-1] = json.dumps(args[-1])
        return self.set(*args, **kwargs)
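# Usage sketch for the joined-key wrapper above (key segments and payload are
# illustrative): all positional args but the last form the key, which is
# colon-joined into a single diskcache key.
_c = Cache()
_c.set_json('user', '42', {'name': 'Ada'}, expire=60)  # stored under 'user:42'
assert _c.get_json('user', '42') == {'name': 'Ada'}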
def worker(queue, eviction_policy, processes, threads):
    timings = co.defaultdict(list)
    cache = Cache('tmp', eviction_policy=eviction_policy)

    for index, (action, key, value) in enumerate(iter(queue.get, None)):
        start = time.time()

        try:
            if action == 'set':
                cache.set(key, value, expire=EXPIRE)
            elif action == 'get':
                result = cache.get(key)
            else:
                assert action == 'delete'
                cache.delete(key)
        except Timeout:
            miss = True
        else:
            miss = False

        stop = time.time()

        if (action == 'get' and processes == 1 and threads == 1
                and EXPIRE is None):
            assert result == value

        if index > WARMUP:
            delta = stop - start
            timings[action].append(delta)
            if miss:
                timings[action + '-miss'].append(delta)

    queue.put(timings)
    cache.close()
class BaseCacheAnalyzer(BaseAnalyzer):
    def __init__(self, cache_location=None, force=False):
        super().__init__()
        self.cache_location = cache_location
        self.cache = None
        self.force = force

    def initialize(self):
        from diskcache import Cache
        self.cache = Cache(self.cache_location or self.uid + "_cache")

    def filter(self, simulation):
        return self.force or not self.is_in_cache(simulation.id)

    def to_cache(self, key, data):
        self.cache.set(key, data)

    def from_cache(self, key):
        return self.cache.get(key)

    def is_in_cache(self, key):
        return key in self.cache

    def __del__(self):
        if self.cache:
            self.cache.close()

    @property
    def keys(self):
        return list(self.cache.iterkeys()) if self.cache else None
class CachedGeocoder():
    def __init__(self, geocoder_from_geopy, identifier):
        self.geocoder_from_geopy = geocoder_from_geopy
        self.identifier = identifier
        self.cache = Cache('tmp/' + identifier)

    def get_geocoder_reply(self, query):
        returned = self.cache.get(query)
        if returned is not None:
            return returned['reply']
        else:
            uncached = self.get_uncached_geocoder_reply(query)
            self.cache.set(query, {'reply': uncached})
            return uncached

    def get_uncached_geocoder_reply(self, query):
        print(query, "for", self.identifier, "was not cached")
        while True:
            time.sleep(1)
            try:
                return self.geocoder_from_geopy.geocode(query)
            except geopy.exc.GeocoderUnavailable:
                sleep_time_in_s = 10
                print("will retry after", sleep_time_in_s, "seconds")
                time.sleep(sleep_time_in_s)
                continue
def worker(queue, eviction_policy):
    timings = {'get': [], 'set': [], 'delete': []}
    cache = Cache('tmp', eviction_policy=eviction_policy)

    for index, (action, key, value) in enumerate(iter(queue.get, None)):
        start = time.time()

        if action == 'set':
            cache.set(key, value, expire=EXPIRE)
        elif action == 'get':
            result = cache.get(key)
        else:
            assert action == 'delete'
            cache.delete(key)

        stop = time.time()

        if action == 'get' and PROCESSES == 1 and THREADS == 1 and EXPIRE is None:
            assert result == value

        if index > WARMUP:
            timings[action].append(stop - start)

    queue.put(timings)
    cache.close()
def test_cache():
    # test caching of complex data types
    my_cache_dir = os.path.join(user_cache_dir("genomepy"), str(linux))
    os.makedirs(my_cache_dir, exist_ok=True)
    cache = Cache(directory=my_cache_dir, size_limit=1000000)
    test = ["a", "b", "c"]

    @cache.memoize(expire=10, tag="expensive_function")
    def expensive_function(data):
        return pd.DataFrame(data)

    # Get the full function name, as in
    # https://github.com/grantjenks/python-diskcache/blob/master/diskcache/core.py
    def full_name(func):
        """Return full name of `func` by adding the module and function name."""
        return func.__module__ + "." + func.__qualname__

    # cache key tuple
    cache_key = (
        full_name(expensive_function),
        test,
        None,
    )

    # check that results before/after caching are identical
    expected = expensive_function(test)
    cached_data = cache.get(cache_key)
    assert cached_data.equals(expected), "Cached data does not match expected data"

    rmtree(my_cache_dir, ignore_errors=True)
class Cache:
    def __init__(self):
        self.cache = DiskCache(CACHE_PATH, size_limit=CACHE_SIZE)

    def get(self, key):
        value = self.cache.get(key)
        if value:
            logging.debug('Hit cache key %s' % key)
        return value

    def clear(self):
        return self.cache.clear()

    def set(self, key, value):
        return self.cache.set(key, value)

    def get_or(self, key, _or):
        """Get a key's value, or use function's return value to set"""
        if key in self.cache:
            logging.debug('Hit cache key %s' % key)
            return self.cache[key]
        value = _or()
        self.cache.set(key, value)
        return value
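# Usage sketch for get_or above (the key and loader are illustrative): the
# loader callable runs only on a miss; later calls hit the disk cache.
_cache = Cache()
value = _cache.get_or('expensive:result', lambda: compute())  # `compute` is hypothetical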
def worker(queue, eviction_policy, processes, threads):
    timings = {'get': [], 'set': [], 'delete': []}
    cache = Cache('tmp', eviction_policy=eviction_policy)

    for index, (action, key, value) in enumerate(iter(queue.get, None)):
        start = time.time()

        if action == 'set':
            cache.set(key, value, expire=EXPIRE)
        elif action == 'get':
            result = cache.get(key)
        else:
            assert action == 'delete'
            cache.delete(key)

        stop = time.time()

        if action == 'get' and processes == 1 and threads == 1 and EXPIRE is None:
            assert result == value

        if index > WARMUP:
            timings[action].append(stop - start)

    queue.put(timings)
    cache.close()
class CacheInteraction:
    def __init__(self, dimension=DimensionType.DIM_2D):
        self._cache = Cache(settings.CACHE_ROOT)
        self._dimension = dimension

    def __del__(self):
        self._cache.close()

    def get_buff_mime(self, chunk_number, quality, db_data):
        chunk, tag = self._cache.get('{}_{}_{}'.format(db_data.id, chunk_number, quality), tag=True)

        if not chunk:
            chunk, tag = self.prepare_chunk_buff(db_data, quality, chunk_number)
            self.save_chunk(db_data.id, chunk_number, quality, chunk, tag)
        return chunk, tag

    def prepare_chunk_buff(self, db_data, quality, chunk_number):
        from cvat.apps.engine.frame_provider import FrameProvider  # TODO: remove circular dependency
        writer_classes = {
            FrameProvider.Quality.COMPRESSED: Mpeg4CompressedChunkWriter
                if db_data.compressed_chunk_type == DataChoice.VIDEO
                else ZipCompressedChunkWriter,
            FrameProvider.Quality.ORIGINAL: Mpeg4ChunkWriter
                if db_data.original_chunk_type == DataChoice.VIDEO
                else ZipChunkWriter,
        }

        image_quality = 100 if writer_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
        mime_type = 'video/mp4' if writer_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'

        kwargs = {}
        if self._dimension == DimensionType.DIM_3D:
            kwargs["dimension"] = DimensionType.DIM_3D
        writer = writer_classes[quality](image_quality, **kwargs)

        images = []
        buff = BytesIO()
        upload_dir = {
            StorageChoice.LOCAL: db_data.get_upload_dirname(),
            StorageChoice.SHARE: settings.SHARE_ROOT
        }[db_data.storage]

        if hasattr(db_data, 'video'):
            source_path = os.path.join(upload_dir, db_data.video.path)
            reader = VideoDatasetManifestReader(
                manifest_path=db_data.get_manifest_path(),
                source_path=source_path,
                chunk_number=chunk_number,
                chunk_size=db_data.chunk_size,
                start=db_data.start_frame,
                stop=db_data.stop_frame,
                step=db_data.get_frame_step())
            for frame in reader:
                images.append((frame, source_path, None))
        else:
            reader = ImageDatasetManifestReader(
                manifest_path=db_data.get_manifest_path(),
                chunk_number=chunk_number,
                chunk_size=db_data.chunk_size,
                start=db_data.start_frame,
                stop=db_data.stop_frame,
                step=db_data.get_frame_step())
            for item in reader:
                source_path = os.path.join(upload_dir, f"{item['name']}{item['extension']}")
                images.append((source_path, source_path, None))
        writer.save_as_chunk(images, buff)
        buff.seek(0)
        return buff, mime_type

    def save_chunk(self, db_data_id, chunk_number, quality, buff, mime_type):
        self._cache.set('{}_{}_{}'.format(db_data_id, chunk_number, quality), buff, tag=mime_type)
class FileCache(BaseCache):
    """
    BaseCache implementation using files to store the data.

    This implementation uses diskcache.Cache.
    See http://www.grantjenks.com/docs/diskcache/api.html#cache for more information.

    This cache requires you to install diskcache using `pip install diskcache`.
    """

    def __init__(self, path, **settings):
        """
        Constructor

        Arguments:
            path {String} -- The path on the disk to save the data
            settings {dict} -- The settings values for diskcache
        """
        from diskcache import Cache
        self._cache = Cache(path, **settings)

    def __del__(self):
        """
        Close the connection as the cache instance is deleted.

        Safe to use as there are no circular refs.
        """
        self._cache.close()

    def set(self, key, value, timeout=300):
        expire_time = None if timeout == 0 else timeout
        self._cache.set(_hash(key), value, expire=expire_time)

    def get(self, key, default=None):
        return self._cache.get(_hash(key), default)

    def invalidate(self, key):
        self._cache.delete(_hash(key))
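# Usage sketch for FileCache above (path and value are illustrative; `_hash`
# is assumed to be the module's key-hashing helper). Note the design choice:
# timeout=0 maps to expire=None, i.e. the entry never expires.
file_cache = FileCache('/tmp/filecache-demo')
file_cache.set('greeting', 'hello', timeout=0)   # never expires
assert file_cache.get('greeting') == 'hello'
file_cache.invalidate('greeting')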
class CacheProxy:
    def __init__(self, script):
        self.config = get_configs()
        collectors_dir = self.config.get('base', 'collectors_dir')
        self.cache = Cache(
            os.path.join(collectors_dir, 'cache/script/', script))

    def get(self, key):
        return self.cache.get(key)

    def set(self, key, value):
        self.cache.set(key, value)

    def delete(self, key):
        self.cache.delete(key)

    def close(self):
        self.cache.close()

    def counter_to_gauge(self, key, value):
        last_value = self.get(key)
        self.set(key, value)
        if last_value is None:
            return None
        gauge = value - last_value
        # A negative delta, or a delta larger than the previous sample, is
        # treated as a counter reset; report nothing for this interval.
        if gauge < 0 or gauge > last_value:
            return None
        return gauge
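# Usage sketch for counter_to_gauge above (script name and samples are
# illustrative): it converts a monotonically increasing counter into a
# per-interval delta, returning None for the first sample and on suspected
# counter resets.
proxy = CacheProxy('netstats')            # hypothetical collector script name
proxy.counter_to_gauge('rx_bytes', 1000)  # -> None (no previous sample)
proxy.counter_to_gauge('rx_bytes', 1500)  # -> 500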
def add_from_engine(cls, engine, **kwargs):
    app = kwargs.get('app')
    session = kwargs.get('session', db.session)
    bind_name = kwargs.get('bind_name')
    backend_version = kwargs.get('backend_version')
    use_cache = kwargs.get('use_cache', False) and app

    if use_cache:
        flush_cache = kwargs.get('flush_cache', False)
        cache_path = kwargs.get('cache_path', app.config.get('DISKCACHE_PATH'))
        cache_timeout = kwargs.get('DISKCACHE_DBEX_DATABASE_TIMEOUT')
        cache = Cache(cache_path)
        if flush_cache:
            cache.pop(engine)

    try:
        if use_cache:
            inspector = cache.get(engine)
            if inspector is None:
                # Cache miss: inspect the engine and store the result, rather
                # than caching the miss itself.
                inspector = inspect(engine)
                cache.set(engine, inspector, cache_timeout)
        else:
            inspector = inspect(engine)
    except OperationalError as e:
        logger.error("can't inspect engine %s" % engine)
        raise e
    except Exception as e:
        raise e

    db_name = db_engine_to_name(engine)
    try:
        info = get_discovered_db_engine_info(bind_name)
    except NoBindNameFoundError:
        logger.warning("No info found for engine bind name '%s'" % bind_name)
        info = ''

    if not backend_version and info:
        try:
            backend_version = info['backend_version']
        except (TypeError, KeyError):
            pass
        except Exception as e:
            raise e

    d = Database(name=db_name, bind_name=bind_name, engine=engine.name,
                 driver=engine.driver, backend_version=backend_version)
    table_names = inspector.get_table_names()
    for table_name in table_names:
        logger.debug("table_name: %s" % (table_name,))
        t = Table(name=table_name)
        column_names = inspector.get_columns(table_name)
        for column in column_names:
            c = Column(**column)
            t.columns.append(c)
        d.tables.append(t)
    session.add(d)
    if kwargs.get('db_session_commit_enabled', True):
        session.commit()
class CacheInteraction:
    def __init__(self, dimension=DimensionType.DIM_2D):
        self._cache = Cache(settings.CACHE_ROOT)
        self._dimension = dimension

    def __del__(self):
        self._cache.close()

    def get_buff_mime(self, chunk_number, quality, db_data):
        chunk, tag = self._cache.get('{}_{}_{}'.format(db_data.id, chunk_number, quality), tag=True)

        if not chunk:
            chunk, tag = self.prepare_chunk_buff(db_data, quality, chunk_number)
            self.save_chunk(db_data.id, chunk_number, quality, chunk, tag)
        return chunk, tag

    def prepare_chunk_buff(self, db_data, quality, chunk_number):
        from cvat.apps.engine.frame_provider import FrameProvider  # TODO: remove circular dependency
        writer_classes = {
            FrameProvider.Quality.COMPRESSED: Mpeg4CompressedChunkWriter
                if db_data.compressed_chunk_type == DataChoice.VIDEO
                else ZipCompressedChunkWriter,
            FrameProvider.Quality.ORIGINAL: Mpeg4ChunkWriter
                if db_data.original_chunk_type == DataChoice.VIDEO
                else ZipChunkWriter,
        }

        image_quality = 100 if writer_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
        mime_type = 'video/mp4' if writer_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'

        kwargs = {}
        if self._dimension == DimensionType.DIM_3D:
            kwargs["dimension"] = DimensionType.DIM_3D
        writer = writer_classes[quality](image_quality, **kwargs)

        images = []
        buff = BytesIO()
        upload_dir = {
            StorageChoice.LOCAL: db_data.get_upload_dirname(),
            StorageChoice.SHARE: settings.SHARE_ROOT
        }[db_data.storage]

        if os.path.exists(db_data.get_meta_path()):
            source_path = os.path.join(upload_dir, db_data.video.path)
            meta = PrepareInfo(source_path=source_path,
                               meta_path=db_data.get_meta_path())
            for frame in meta.decode_needed_frames(chunk_number, db_data):
                images.append(frame)
            writer.save_as_chunk([(image, source_path, None) for image in images], buff)
        else:
            with open(db_data.get_dummy_chunk_path(chunk_number), 'r') as dummy_file:
                images = [os.path.join(upload_dir, line.strip()) for line in dummy_file]
            writer.save_as_chunk([(image, image, None) for image in images], buff)
        buff.seek(0)
        return buff, mime_type

    def save_chunk(self, db_data_id, chunk_number, quality, buff, mime_type):
        self._cache.set('{}_{}_{}'.format(db_data_id, chunk_number, quality), buff, tag=mime_type)
def load_game(self) -> FrostbiteGame:
    """
    Load the game from the cache, or reinitialize it if it does not exist.
    """
    cache = Cache(self.cache_path)

    LOG.info("Loading game from cache")
    fbg = cache.get(self.CACHE_KEY_GAME)
    if not fbg or not isinstance(fbg, FrostbiteGame):
        LOG.info("Cache entry invalid or not found, reinitializing..")
        fbg = super().load_game()
        cache.set(self.CACHE_KEY_GAME, fbg)
    return fbg
class KeyValueDB(Generic[NativeType, StorageType], abc.ABC):
    """Interface for a concrete DB backend."""

    _native_type: NativeType
    _storage_type: StorageType

    def __init__(self, database_dir: Path):
        if not database_dir.exists():
            database_dir.mkdir(mode=0o750, parents=True)
        self._cache = Cache(str(database_dir))

    def __contains__(self, key: Any) -> bool:
        return self._cache.__contains__(key)

    def __delitem__(self, key: Any) -> bool:
        return self._cache.__delitem__(key)

    def __getitem__(self, key: Any) -> NativeType:
        return self._storage_to_native_type(self._cache.__getitem__(key))

    def __setitem__(self, key: Any, value: NativeType) -> None:
        return self._cache.__setitem__(key, self._native_to_storage_type(value))

    def _native_to_storage_type(self, value: NativeType) -> StorageType:
        if self._native_type is self._storage_type or self._storage_type is None:
            return cast(StorageType, value)
        else:
            return self._storage_type(value)

    def _storage_to_native_type(self, value: StorageType) -> NativeType:
        if self._native_type is self._storage_type or self._native_type is None:
            return cast(NativeType, value)
        else:
            return self._native_type(value)

    def close(self, *args, **kwargs) -> None:
        return self._cache.close()

    def get(self, key: Any, default: Any = None, *args, **kwargs) -> Union[Any, NativeType]:
        value = self._cache.get(key, default, *args, **kwargs)
        if value is default:
            return default
        else:
            return self._storage_to_native_type(value)

    def set(self, key: Any, value: NativeType, *args, **kwargs) -> bool:
        return self._cache.set(key, self._native_to_storage_type(value), *args, **kwargs)

    def touch(self, *args, **kwargs) -> bool:
        return self._cache.touch(*args, **kwargs)
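# Sketch of a concrete KeyValueDB subclass (class name, types, and path are
# illustrative): values are ints in Python but persisted as strings on disk,
# so the conversion hooks above do real work in both directions.
class IntDB(KeyValueDB[int, str]):
    _native_type = int
    _storage_type = str

int_db = IntDB(Path('/tmp/intdb-demo'))
int_db['answer'] = 42          # stored as '42'
assert int_db['answer'] == 42  # converted back to int on read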
class CacheWrapper:
    def __init__(self, path, size):
        self._cache = Cache(path, size_limit=size)

    def update_cache_entries(self, entries: List[KVPair]):
        """
        Update cache entries
        :param entries: a list of key-value pairs
        :return:
        """
        for entry in entries:
            self._cache.set(entry.key, entry.value)  # ALWAYS override!

    def set_entry(self, entry: KVPair):
        self._cache.set(entry.key, entry.value)

    def get_cached_entries(self, keys: List[Any]) -> Tuple[List[Any], List[Any]]:
        """
        Retrieve cached entries
        :param keys: a list of keys to retrieve
        :return: a tuple (<cached results>, <missing entries>)
        """
        to_compute = []
        cached_entries = []
        for key in keys:
            entry = self._cache.get(key)
            if entry is None:
                to_compute.append(key)
            else:
                cached_entries.append(entry)
        return cached_entries, to_compute

    def get_cached_entry(self, key: Any) -> Any:
        return self._cache.get(key)
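# Usage sketch for get_cached_entries above (keys and values are illustrative;
# KVPair is assumed to be constructible with `key` and `value` attributes, as
# the methods above use them):
wrapper = CacheWrapper('/tmp/kv-demo', size=2**30)
wrapper.set_entry(KVPair(key='a', value=1))
hits, missing = wrapper.get_cached_entries(['a', 'b'])
# hits == [1], missing == ['b'] -- compute the missing keys, then cache them.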
class DiscCacheWrapper():
    DISC_CACHE_PATH = "./tmp/flickr-auth-disc-cache"

    def __init__(self):
        self.disc_cache = Cache(directory=DiscCacheWrapper.DISC_CACHE_PATH)

    def get(self, key):
        # This can raise a diskcache.Timeout error if it fails to talk to its database
        return self.disc_cache.get(key=key, default=None, retry=False)

    def set(self, key, value, timeout=None):
        # This can raise a diskcache.Timeout error if it fails to talk to its database
        self.disc_cache.set(key=key, value=value, expire=timeout, retry=False)

    def delete(self, key):
        self.disc_cache.delete(key=key)
class Cache(object):
    def __init__(self):
        try:
            self.cache = DC('./tmp')
        except Exception as ex:
            print('Got an exception with diskcache open: {}'.format(ex))
            self.cache = None

    def __del__(self):
        try:
            self.cache.close()
        except Exception as ex:
            print('Got an exception with diskcache close: {}'.format(ex))

    def set(self, key, value):
        if self.cache is not None:
            self.cache.set(key, BytesIO(value), read=True, tag=u'data')

    def get(self, key):
        if self.cache is not None:
            # With tag=True diskcache returns a (value, tag) tuple, so unpack
            # it before testing for a miss.
            value, tag = self.cache.get(key, default=b'', read=True, tag=True)
            if value is not None and value != b'':
                return value, tag
        return None

    def pop(self, key):
        if self.cache is not None:
            # Unlike get(), Cache.pop() takes no `read` argument.
            value, tag = self.cache.pop(key, default=b'', tag=True)
            if value is not None and value != b'':
                return value, tag
        return None

    def delete(self, key):
        if self.cache is not None:
            self.cache.delete(key)

    def create_index(self):
        if self.cache is not None:
            self.cache.create_tag_index()
            return self.cache.tag_index
        return None

    def clear_all(self):
        if self.cache is not None:
            self.cache.clear()
def main(user, password, platform, codes, no_cache, cache_dir):
    """redeem all active or individual shift codes for Borderlands"""
    no_cache = True if codes else no_cache
    platform = Shift.Platforms[platform]
    try:
        cache = Cache(str(Path(cache_dir, 'blshift.cache')),
                      eviction_policy='none') if not no_cache else {}
        redeemed = cache.get(user, set())
        with Shift(platform, user, password) as shift:
            if codes:
                codes = [{'code': x['code'], 'reward': x['offer_title_text']}
                         for x in (shift.info(y) for y in codes) if x]
            else:
                codes = shift.get_codes()
            if redeemed.issuperset(x['code'] for x in codes):
                if sys.stdout.isatty():
                    cl.echo('No new codes found')
                raise SystemExit(0)
            width = max(
                len(x['reward']) + len(x['code'])
                for x in codes if x['code'] not in redeemed)
            try:
                with ThreadPool(8, redeem_code_init, (shift,)) as pool:
                    for code, success, msg in pool.imap_unordered(
                            redeem_code,
                            (x for x in codes if x['code'] not in redeemed)):
                        pad = width - (len(code['code']) + len(code['reward']))
                        msg = (cl.style('Success', fg='green') if success
                               else cl.style(msg.replace('_', ' ').title(), fg='red'))
                        cl.echo(
                            f"{cl.style(code['code'], bold=True)} {code['reward']}:"
                            f" {' ' * pad}{msg}",
                            err=(not success))
                        redeemed.add(code['code'])
            finally:
                cache[user] = redeemed.intersection(x['code'] for x in codes)
    except Exception as err:
        cl.echo(f"{cl.style('Error', fg='red')}: {str(err)}", err=True)
def get_ingress_status(domain, code):
    cache = Cache('/tmp/check_k8s_status.cache')
    data_cache = cache.get(domain, default=b'', read=True, expire_time=False)
    if domain in cache:
        # print(data_cache)
        if code in data_cache:
            print(data_cache[code])
        else:
            print(0.0)
        exit(0)
    data = get_status_code_ratio(domain)
    expire_time = es_query_duration / 1000
    cache.set(domain, data, expire=expire_time)
    if code in data:
        print(data[code])
    else:
        print(0.0)
class DiskCache():
    def __init__(self, cache_dir, ttl=None):
        self.ttl = ttl
        self.cache = Cache(cache_dir, eviction_policy='least-recently-used')

    def __getitem__(self, key):
        return self.cache[key]

    def __setitem__(self, key, value):
        return self.cache.set(key, value, expire=self.ttl)

    def get(self, key, default=None):
        return self.cache.get(key, default=default)

    def set(self, key, value):
        return self.cache.set(key, value, expire=self.ttl)

    def clear(self):
        self.cache.clear()
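# Usage sketch for DiskCache above (directory and TTL are illustrative): both
# item assignment and set() route through diskcache's set() with the same
# expire value, so the TTL is applied uniformly.
ttl_cache = DiskCache('/tmp/ttl-demo', ttl=60)
ttl_cache['greeting'] = 'hello'  # expires after 60 seconds
print(ttl_cache.get('greeting', default='<expired>'))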
class BaseCacheAnalyzer(BaseAnalyzer):
    def __init__(self, cache_location=None, force=False,
                 delete_cache_when_done=False, **kwargs):
        super().__init__(**kwargs)
        self.cache_location = cache_location
        self.cache = None
        self.force = force
        self.delete_cache_when_done = delete_cache_when_done

    def initialize(self):
        from diskcache import Cache
        self.cache = Cache(self.cache_location or self.uid + "_cache")

    def filter(self, simulation):
        return self.force or not self.is_in_cache(simulation.id)

    def to_cache(self, key, data):
        self.cache.set(key, data)

    def from_cache(self, key):
        return self.cache.get(key)

    def is_in_cache(self, key):
        return key in self.cache

    def destroy(self):
        if self.cache:
            self.cache.close()
        if self.cache and self.delete_cache_when_done and os.path.exists(self.cache.directory):
            cache_directory = self.cache.directory
            del self.cache
            shutil.rmtree(cache_directory)

    @property
    def keys(self):
        return list(self.cache.iterkeys()) if self.cache else None
def main(source, clear_cache):
    comi_dir_path = '/tmp/comi'
    comi_dir_path_object = Path(comi_dir_path)
    comi_dir_path_object.mkdir(exist_ok=True)
    cache_dir_path = '/tmp/comi/cache'
    cache = Cache(cache_dir_path)
    if clear_cache:
        cache.clear()
        print('Cache cleared')
        return True
    if source:
        if 'github.com' in source and 'blob' in source:
            url = source.replace('blob', 'raw')
        else:
            url = source
    else:
        url = 'https://github.com/commmands/commands/raw/master/commands_1.commands'
    cache_value = cache.get(url)
    temp_commands_path = '/tmp/comi/temp_commands'
    temp_commands_path_object = Path(temp_commands_path)
    temp_commands_path_object.parent.mkdir(parents=True, exist_ok=True)
    if cache_value:
        temp_commands_path_object.write_text(cache_value)
    else:
        commands_response = requests.get(url)
        commands = commands_response.text
        temp_commands_path_object.write_text(commands)
        cache.set(url, commands)
    perl_part = "perl -e 'ioctl STDOUT, 0x5412, $_ for split //, do{ chomp($_ = <>); $_ }'"
    command = f"cat {temp_commands_path} | fzf --tac | {perl_part} ; echo"
    subprocess.call(command, shell=True)
class DiskCachePersistence:
    def __init__(self, dbname, dbpassphrase):
        self.dbname = dbname
        self.dbpassphrase = dbpassphrase
        self.db = Cache(dbname)
        # TODO: create encrypted Cache with kdf dbpassphrase
        # TODO: purge expired skippedMessageKey based on store_time

    def save_conversation(self, conversation):
        # Store the conversation itself as the value; diskcache's set() takes
        # (key, value) and has no `prefix` argument, so the namespace lives in
        # the key.
        return self.db.set(b'conv:' + conversation.ks['CONVid'],
                           conversation, tag='conv', retry=True)

    def load_conversation(self, conv_id):
        return self.db.get(b'conv:' + conv_id, None, retry=True)

    def delete_conversation(self, conversation):
        return self.db.pop(b'conv:' + conversation.ks['CONVid'], None, retry=True)

    def get_other_names(self, name):
        names = []
        for k in self.db:
            # Keys are bytes, so match against a bytes prefix.
            if k.startswith(b'conv:'):
                names.append(self.db[k].other_name)
        return names
class cache:
    def __init__(self, latest=False):
        self.feedCache = Cache(".feedcache")
        self.latest = latest

    def __preprocess_title(self, feed):
        for entry in feed.entries:
            entry["feed_src"] = feed["feed"]["title"]
        return feed

    def __manage_cache(self, url):
        # Initialize data so an early exception cannot leave it unbound.
        data = None
        try:
            if url in self.feedCache:
                data = self.feedCache.get(url)
            else:
                parsed_feed = feedparser.parse(url)
                data = self.__preprocess_title(parsed_feed)
                # cache expires in 30 mins
                self.feedCache.add(url, data, expire=1800)
            self.feedCache.close()
        except ValueError:
            pass
        except Exception:
            pass
        return data

    def get_feed(self, url):
        if self.latest:
            latestFeed = self.__manage_cache(url).entries
            if len(latestFeed) > 0:
                return latestFeed[0]
        return self.__manage_cache(url).entries
class CacheManager(object):
    def __init__(self, cache_path=CACHE_PATH):
        self.cache = Cache(cache_path)

    def items(self):
        # Note: despite the name, this yields keys only.
        return self.cache.iterkeys()

    def has_key(self, key):
        return key in self.cache

    def set(self, key, value, ttl=TTL):
        return self.cache.set(key=key, value=value, expire=ttl)

    def get(self, key):
        return self.cache.get(key=key)

    def clear_cache(self):
        for key in self.cache:
            if key != 'censys_credentials':
                del self.cache[key]
        return True

    def close(self):
        self.cache.close()
sys.stderr.write(
    '(run this in your terminal: "python3 -m pip install requests" or '
    '"python3 -m pip install --user requests")\n')
exit(2)

MAX_PERSONS = 200  # subject to change: see https://www.familysearch.org/developers/docs/api/tree/Persons_resource

FACT_TAGS = {
    'http://gedcomx.org/Birth': 'BIRT',
    'http://gedcomx.org/Death': 'DEAT',
}

tmp_dir = os.path.join(tempfile.gettempdir(), 'getAncestorDataGUI')
cache = Cache(tmp_dir)
lang = cache.get('lang')


def _(string):
    return string


def cont(string):
    """Parse a GEDCOM line, adding CONC and CONT tags if necessary."""
    level = int(string[:1]) + 1
    lines = string.splitlines()
    res = list()
    max_len = 255
    for line in lines:
        c_line = line
        to_conc = list()
import tempfile
import asyncio
import re
import os
import sys

# local imports
from getmyancestors import Session, Tree, Indi, Fam
from mergemyancestors import Gedcom
from translation import translations

tmp_dir = os.path.join(tempfile.gettempdir(), 'fstogedcom')
cache = Cache(tmp_dir)
lang = cache.get('lang')


def _(string):
    if string in translations and lang in translations[string]:
        return translations[string][lang]
    return string


# Entry widget with a right-click menu to copy/cut/paste
class EntryWithMenu(Entry):
    def __init__(self, master, **kw):
        super(EntryWithMenu, self).__init__(master, **kw)
        self.bind('<Button-3>', self.click_right)

    def click_right(self, event):