def fetch_city_name_id(city_id='', city_name='', db_name='imd_city_db'):
    """Fetch city names, IDs and corresponding links from the local LevelDB.

    If both ``city_id`` and ``city_name`` are passed, ``city_id`` is chosen
    over ``city_name`` for the lookup.  Passing only ``city_name`` returns
    possible matches.  With no arguments, all available records are returned.

    :param city_id: exact city ID to look up (validated first).
    :param city_name: partial/full city name for fuzzy matching.
    :param db_name: path of the LevelDB database to open.
    :return: dict mapping city id -> list of fields, or ``{'status': ...}``
             on "not found" or on any error.
    """
    resp = {}
    try:
        db_handle = DB(db_name, create_if_missing=True)
        try:
            if city_id:
                if not __validate_city_id__(city_id):
                    raise Exception('city id not validated')
                tmp = db_handle.get(city_id.encode('utf-8'), b'')
                if tmp:
                    # Stored values are ';'-joined field lists.
                    resp.update({city_id: tmp.decode('utf-8').split(';')})
                else:
                    resp = {'status': 'record not found'}
            elif city_name:
                resp.update(__match_city_name__(city_name, db_handle.iterator()))
            else:
                # No filter given: dump every record in the database.
                itr = db_handle.iterator()
                try:
                    for raw_id, raw_val in itr:
                        resp[raw_id.decode('utf-8')] = raw_val.decode('utf-8').split(';')
                finally:
                    # BUGFIX: close the iterator even if decoding raises.
                    itr.close()
        finally:
            # BUGFIX: the handle was previously closed only on the success
            # path, leaking it whenever any branch above raised.
            db_handle.close()
    except Exception as e:
        # plError (plyvel's error type) subclasses Exception and both
        # handlers were identical, so a single handler suffices.
        resp = {'status': str(e)}
    return resp
def test_repair_db():
    """A repaired database must still contain the records written before."""
    with tmp_db('repair', create=False) as name:
        # Write a single record, then fully release the handle so the
        # on-disk files are free for repair.
        handle = DB(name, create_if_missing=True)
        handle.put(b'foo', b'bar')
        handle.close()
        del handle

        plyvel.repair_db(name)

        # Reopen and check the record survived the repair.
        handle = DB(name)
        assert_equal(b'bar', handle.get(b'foo'))
def test_open_close():
    """Every operation on a closed DB (or its children) raises RuntimeError."""
    with tmp_db('open_close', create=False) as name:
        # Open with options that allocate extra native objects (LRU cache,
        # bloom filter) so closing exercises their teardown too.
        database = DB(name, create_if_missing=True, lru_cache_size=1024 * 1024, bloom_filter_bits=10)
        database.put(b'key', b'value')

        # Create dependent objects before the close.
        batch = database.write_batch()
        snap = database.snapshot()
        db_iter = database.iterator()
        snap_iter = snap.iterator()

        database.close()
        assert database.closed

        # Database-level operations must now fail ...
        with assert_raises(RuntimeError):
            database.get(b'key')
        with assert_raises(RuntimeError):
            database.put(b'key', b'value')
        with assert_raises(RuntimeError):
            database.delete(b'key')

        # ... as must write batches ...
        with assert_raises(RuntimeError):
            batch.put(b'key', b'value')

        # ... snapshot creation and snapshot reads ...
        with assert_raises(RuntimeError):
            database.snapshot()
        with assert_raises(RuntimeError):
            snap.get(b'key')

        # ... and both kinds of iterator.
        with assert_raises(RuntimeError):
            next(db_iter)
        with assert_raises(RuntimeError):
            next(snap_iter)
def deobfuscate_with_db(db: plyvel.DB, data: bytes) -> bytes:
    """De-obfuscate *data* using the obfuscation key stored in *db*.

    The key lives under the record ``0e00 + b'obfuscate_key'``.  When
    present, its leading byte encodes the key length (8 bytes by default)
    and is stripped before use.  When absent, ``None`` is forwarded and the
    downstream helper falls back to 8 zero bytes (XOR with zeros is a
    no-op).
    """
    key_record = bytes.fromhex('0e00') + b'obfuscate_key'
    obf_key = db.get(key_record)
    if obf_key is not None:
        # Drop the length-prefix byte; the remainder is the actual key.
        obf_key = obf_key[1:]
    return deobfuscate_with_key(obf_key, data)
def _append(self, db: plyvel.DB, key: bytes, value: bytes) -> None:
    """Append *value* to the bytes already stored under *key* in *db*.

    A missing key reads back as ``b""`` so the write below degrades to a
    plain insert.  ``fill_cache=False`` keeps this one-off read out of the
    block cache.
    """
    existing = db.get(key, default=b"", fill_cache=False)
    db.put(key, existing + value)
class Snapshot(object):
    """Persistent input->output cache ("snapshot") backed by LevelDB.

    Known (input, output) pairs are stored on disk so repeated calls with
    the same arguments can skip the wrapped computation and save
    time/cpu/...  Use the instance directly (``get``/``put``) or as a
    decorator factory via :meth:`snapshot` / ``__call__``.
    """

    def __init__(self, dbpath, *args, debug=False, refresh=None, **kwargs):
        """Open (or create) the backing database.

        :param dbpath: filesystem path of the LevelDB database.
        :param debug: when true, raise the module log handler to DEBUG.
        :param refresh: ignore data in db and refresh using new value.
        """
        super().__init__(*args, **kwargs)
        try:
            self.db = DB(dbpath, create_if_missing=True)
        except Exception:
            # Keep the attribute defined so __del__/close() stay safe,
            # then re-raise with the original traceback intact.
            self.db = None
            raise
        self.old_key = None    # legacy (positions, keys) spec, see set_upgrade
        self.upgrade = False   # when True, migrate legacy entries on lookup
        if debug:
            handler.level = logging.DEBUG
        self.refresh = bool(refresh)

    def __del__(self):
        self.close()

    def __enter__(self):
        # Added so the instance actually works in a ``with`` statement;
        # pairs with __exit__ below.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # BUGFIX: the context-manager protocol passes three exception
        # arguments; the old zero-argument signature raised TypeError.
        self.close()

    def __iter__(self):
        # Yield decoded (key, value) pairs for every cached entry.
        for k, v in self.db.iterator():
            yield self.recover_bytes(k), self.recover_bytes(v)

    def __contains__(self, key):
        # NOTE: a stored value of None is indistinguishable from "missing".
        return self.get(key, None) is not None

    def __call__(self, *args, ignore=None, redos=None):
        # BUGFIX: ignore/redos were forwarded positionally, which appended
        # them to *_args (the key spec) instead of the keyword options.
        return self.snapshot(*args, ignore=ignore, redos=redos)

    def close(self):
        # getattr-guard: __del__ may run even if __init__ never finished.
        if getattr(self, 'db', None):
            self.db.close()
            self.db = None

    @staticmethod
    def to_bytes(data):
        """Serialise *data* to bytes.

        Supports all basic types; pickle is used so any picklable object
        can be stored.
        """
        return pickle.dumps(data)

    @staticmethod
    def recover_bytes(data):
        """Inverse of :meth:`to_bytes`.

        NOTE(review): unpickling is unsafe on untrusted data -- only use
        this cache with databases you control.
        """
        return pickle.loads(data)

    def get(self, key, default=None):
        """Return the raw (still pickled) value stored under *key*.

        The user should determine whether the key exists by passing a
        sentinel *default* and comparing the return value against it.
        """
        logger.debug('key: {}', key, )
        key = self.to_bytes(key)
        data = self.db.get(key, default)
        if data != default:
            logger.debug('get exist: {} -> data(type={})', key, type(data))
        return data

    def get_result(self, key):
        """Return the decoded value stored under *key*, or None if absent.

        (The previous ``-> bytes`` annotation was wrong: the value is
        unpickled back into its original type before being returned.)
        """
        data = self.get(key)
        if data is None:
            return None
        return self.recover_bytes(data)

    def put(self, k, v):
        """Store value *v* under key *k*; both are pickled first."""
        logger.debug('put: {} -> data(type={})', k, type(v))
        key = self.to_bytes(k)
        data = self.to_bytes(v)
        return self.db.put(key, data)

    def exist(self, key):
        """True when *key* has a cached (non-None) value."""
        return key in self

    def delete(self, k):
        """Remove the entry stored under key *k*."""
        key = self.to_bytes(k)
        return self.db.delete(key)

    def set_upgrade(self, *old_args):
        """Enable migration of entries stored under a legacy key spec."""
        positions, keys = self.get_key_config(*old_args)
        self.upgrade = True
        self.old_key = positions, keys

    @staticmethod
    def get_key_config(*args):
        """Split a key spec into positional indices (ints) and kwarg names (strs)."""
        positions, keys = [], []
        for item in args:
            if isinstance(item, int):
                positions.append(item)
            elif isinstance(item, str):
                keys.append(item)
        return positions, keys

    def get_key(self, positions, keys, *args, **kwargs):
        """Build the cache key list from the selected call arguments."""
        logger.debug('get key from {} {} (positions:{} keys:{})', args, kwargs, positions, keys, )
        key = []
        for p in positions:
            key.append(args[p])
        for k in keys:
            key.append(kwargs[k])
        return key

    def snapshot(self, *_args, ignore=None, redos=None, ignore_callback=None, redo_callback=None):
        """Decorator factory that caches the wrapped function's results.

        ``*_args`` is the key spec: an int is the idx/pos of a positional
        argument, a str is a key name in kwargs of the wrapped function.

        :param ignore: return value that must never be cached.
        :param redos: list of return values that force recomputation.
        :param ignore_callback: predicate form of *ignore*.
        :param redo_callback: predicate form of *redos*.
        """
        logger.debug('choose as key: {}', _args)
        positions, keys = self.get_key_config(*_args)
        # will ignore some return value, aka. no snapshot for it
        _ignore = ignore
        # will redo for some return value, should be a list
        _redos = redos or []
        logger.debug('choose position args: {}', positions)
        logger.debug('choose name kwargs: {}', keys)

        def do_snapshot(func):
            def is_ignore(value):
                # Never cache values matching the ignore spec.
                if value == _ignore:
                    return True
                if ignore_callback and ignore_callback(value):
                    return True
                return False

            def is_redo(value):
                # Cached values matching the redo spec are recomputed.
                if value in _redos:
                    return True
                if redo_callback and redo_callback(value):
                    return True
                return False

            def worker(*args, **kwargs):
                key = self.get_key(positions, keys, *args, **kwargs)
                if self.upgrade:
                    # Migrate an entry stored under the legacy key layout.
                    old_key = self.get_key(
                        self.old_key[0], self.old_key[1], *args, **kwargs)
                    logger.info('will upgrade old_key: {}', old_key)
                    result = self.get(old_key)
                    if result is not None:
                        result = self.recover_bytes(result)
                        logger.info('upgrade result: {} -> {} -> {}', old_key, key, result)
                        self.delete(old_key)
                        self.put(key, result)
                        return result
                else:
                    result = self.get(key)
                    if result is None:
                        pass  # cache miss: fall through and compute
                    else:
                        result = self.recover_bytes(result)
                        if is_redo(result):
                            logger.warning('redo result: {}', result)
                            logging.getLogger().warning('redo result')
                        elif self.refresh:
                            pass  # refresh mode: recompute and overwrite
                        else:
                            return result
                # Cache miss / forced recompute: run the real function.
                result = func(*args, **kwargs)
                value = result
                if is_ignore(value):
                    logger.warning('ignore result: {}', result)
                elif is_redo(value):
                    logger.warning('redo result: {}', result)
                else:
                    self.put(key, value)
                return result

            return worker

        return do_snapshot