def select_transaction(block_id: str, txn_id: str, cache_expire: Optional[int] = None) -> dict:
    """Returns a transaction in a block from storage through the LRU cache

    Args:
        block_id: The ID of the block being queried
        txn_id: The ID of the transaction in the block
        cache_expire: The amount of time (in seconds) until the key expires if cache miss

    Returns:
        Transaction JSON object

    Raises:
        exceptions.NotFound exception when block id is not found
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        obj: Any = None
        key = f"{block_id}/{txn_id}"
        obj = redis.cache_get(key)
        if obj:
            return json.loads(obj)
        obj = storage.select_transaction(STORAGE_LOCATION, block_id, txn_id)
        cache_val = json.dumps(obj, separators=(",", ":")).encode("utf-8")
        if len(cache_val) < CACHE_LIMIT:
            redis.cache_put(key, cache_val, cache_expire)
        return obj
    except exceptions.NotFound:
        raise
    except Exception:
        _log.exception("Uncaught exception while performing storage select_transaction")
        raise exceptions.StorageError("Uncaught exception while performing storage select_transaction")
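# --- Illustrative usage (a sketch, not part of the original module) ----------
# Shows how a caller might read a transaction through the cache-aside path
# above. The block/transaction IDs and the 600-second expiry are hypothetical
# example values.
def _example_select_transaction() -> dict:
    # On a cache hit this returns the cached JSON immediately; on a miss the
    # transaction is fetched from storage and, if its serialized form is
    # smaller than CACHE_LIMIT, cached under the key "<block_id>/<txn_id>".
    return select_transaction("24641", "example-txn-id", cache_expire=600)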
def get(key: str, cache_expire: Optional[int] = None, should_cache: bool = True) -> bytes:
    """Returns an object from storage

    Args:
        key: The key to get from storage
        cache_expire: The amount of time (in seconds) until the key expires if cache miss
        should_cache: Whether or not to fetch/save to/from cache

    Returns:
        data as bytes

    Raises:
        exceptions.NotFound exception if key is not found in storage
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        obj = None
        if should_cache:
            obj = redis.cache_get(key)
        if not obj:
            obj = storage.get(STORAGE_LOCATION, key)
            if should_cache and len(obj) < CACHE_LIMIT:
                redis.cache_put(key, obj, cache_expire)
        return obj
    except exceptions.NotFound:
        raise
    except Exception:
        _log.exception("Uncaught exception while performing storage get")
        raise exceptions.StorageError("Uncaught exception while performing storage get")
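# --- Illustrative usage (a sketch, not part of the original module) ----------
# Demonstrates the should_cache flag on get(). The object key and expiry are
# hypothetical example values.
def _example_cached_get() -> bytes:
    # Cache-aside read: a miss falls through to backing storage and, if the
    # object is smaller than CACHE_LIMIT, caches it for 60 seconds.
    data = get("EXAMPLE/object-key", cache_expire=60)
    # Bypass the cache entirely and always read from backing storage.
    data = get("EXAMPLE/object-key", should_cache=False)
    return data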
def delete(key: str) -> None:
    """Deletes an object in S3 with cache write-thru

    Args:
        key: The key of the object being deleted in S3

    Raises:
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        storage.delete(STORAGE_LOCATION, key)
        redis.cache_delete(key)
    except Exception:
        raise exceptions.StorageError("Uncaught exception while performing storage delete")
def does_object_exist(key: str) -> bool:
    """Tests whether or not an object key exists

    Args:
        key: The object key to check

    Returns:
        True if the object exists, False otherwise

    Raises:
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        return storage.does_object_exist(STORAGE_LOCATION, key)
    except Exception:
        raise exceptions.StorageError("Uncaught exception while performing storage does_object_exist")
def list_objects(prefix: str) -> List[str]:
    """List object keys under a common prefix

    Args:
        prefix: The prefix key to scan

    Returns:
        List of string keys on success

    Raises:
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        return storage.list_objects(STORAGE_LOCATION, prefix)
    except Exception:
        raise exceptions.StorageError("Uncaught exception while performing storage list_objects")
def delete_directory(directory_key: str) -> None:
    """Deletes a "directory" key (aka super key)

    Recursively lists all objects within a directory and deletes them,
    as well as the folders, if relevant

    Args:
        directory_key: The key of the directory to delete

    Raises:
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        keys = list_objects(directory_key)
        for key in keys:
            delete(key)
        storage.delete_directory(STORAGE_LOCATION, directory_key)
    except Exception:
        raise exceptions.StorageError("Uncaught exception while performing storage delete_directory")
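# --- Illustrative usage (a sketch, not part of the original module) ----------
# Shows recursive deletion of a prefix via delete_directory(); the directory
# key is a hypothetical example value.
def _example_delete_directory() -> None:
    # Every object under the prefix is deleted individually (evicting each
    # from the cache via delete()) before the directory key itself is removed.
    delete_directory("EXAMPLE/blocks/")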
def put(key: str, value: bytes, cache_expire: Optional[int] = None, should_cache: bool = True) -> None:
    """Puts an object into storage with optional cache write-thru

    Args:
        key: The key of the object being written in S3
        value: The value of the bytes object being written in S3
        cache_expire: The amount of time (in seconds) until the key expires in the cache
        should_cache: Whether or not to write the value through to the cache

    Raises:
        exceptions.StorageError on any unexpected error interacting with storage
    """
    try:
        storage.put(STORAGE_LOCATION, key, value)
        if should_cache:
            redis.cache_put(key, value, cache_expire)
    except Exception:
        raise exceptions.StorageError("Uncaught exception while performing storage put")
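# --- Illustrative usage (a sketch, not part of the original module) ----------
# Demonstrates the write-thru behaviour of put() together with get(); the key
# and payload are hypothetical example values.
def _example_write_thru_roundtrip() -> bytes:
    payload = b'{"hello": "world"}'
    # Writes to backing storage and, because should_cache defaults to True,
    # also places the value in the cache with a 300-second expiry.
    put("EXAMPLE/greeting", payload, cache_expire=300)
    # The subsequent read can then be served directly from the cache.
    return get("EXAMPLE/greeting")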