def store_full_txns(block_model: "l1_block_model.L1BlockModel") -> None:
    """
    Store the transactions object as a single file per block in storage.
    Also updates the indexes for each indexed transaction in Redisearch with block information.
    """
    _log.info("[TRANSACTION DAO] Putting transaction to storage")
    storage.put(f"{FOLDER}/{block_model.block_id}", block_model.export_as_full_transactions().encode("utf-8"))
    # Could optimize by grouping the indexing of transactions in the block with matching txn_types using redisearch.put_many_documents
    for txn in block_model.transactions:
        redisearch.put_document(redisearch.Indexes.transaction.value, f"txn-{txn.txn_id}", {"block_id": txn.block_id}, upsert=True)
        try:
            redisearch.put_document(txn.txn_type, txn.txn_id, txn.export_as_search_index(), upsert=True)
        except redis.exceptions.ResponseError as e:
            # If the index doesn't exist, we don't care that we couldn't place the index (transaction type is probably deleted)
            if str(e) != "Unknown index name":
                raise
            else:
                _log.warning(f"Txn type {txn.txn_type} for txn {txn.txn_id} failed to index. (Transaction type may simply be deleted?) Ignoring")
def perform_api_key_migration_v1_if_necessary() -> None:
    """Checks if an api key migration needs to be performed, and does so if necessary"""
    try:
        if storage.get(f"{FOLDER}/{MIGRATION_V1}") == b"1":
            # Migration was previously performed. No action necessary
            return
    except exceptions.NotFound:
        pass
    _log.info("Api key migration required. Performing now")
    valid_keys = storage.list_objects(prefix=FOLDER)
    regular_keys = list(filter(lambda x: not x.startswith("KEYS/INTERCHAIN/"), valid_keys))
    interchain_keys = list(filter(lambda x: x.startswith("KEYS/INTERCHAIN/"), valid_keys))
    for key in regular_keys:
        _log.info(f"Migrating {key}")
        api_key = api_key_model.new_from_legacy(storage.get_json_from_object(key), interchain_dcid="")
        save_api_key(api_key)
    for key in interchain_keys:
        _log.info(f"Migrating interchain key {key}")
        interchain_dcid = key[key.find("KEYS/INTERCHAIN/") + 16:]  # Get the interchain dcid from the key
        api_key = api_key_model.new_from_legacy(storage.get_json_from_object(key), interchain_dcid=interchain_dcid)
        save_api_key(api_key)
    # Save migration marker once complete
    storage.put(f"{FOLDER}/{MIGRATION_V1}", b"1")
    _log.info("Api key migration v1 complete")
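# Hedged, standalone sketch of the interchain-key slicing used above: the prefix
# "KEYS/INTERCHAIN/" is 16 characters, so slicing from find(...) + 16 yields the
# interchain dcid. The key name below is hypothetical sample data, not from the source.
def _extract_interchain_dcid_example(key: str) -> str:
    prefix = "KEYS/INTERCHAIN/"
    return key[key.find(prefix) + len(prefix):]  # len(prefix) == 16


assert _extract_interchain_dcid_example("KEYS/INTERCHAIN/btc/my-address") == "btc/my-address"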
def start() -> None:
    """Run by the webserver before it boots"""
    try:
        # New chains are often given HMAC keys when created. If found, we write them to storage.
        key_id = secrets.get_dc_secret("hmac-id")
        json_key = json.dumps({"id": key_id, "key": secrets.get_dc_secret("hmac-key"), "root": True, "registration_time": 0}, separators=(",", ":"))
        _log.info("HMAC keys were given to this chain on-boot. Writing them to storage.")
        storage.put(f"KEYS/{key_id}", json_key.encode("utf-8"))
    except exceptions.NotFound:
        _log.info("No HMAC keys were given to this chain on-boot. Skipping credential storage write.")
    _log.info("Checking if redisearch indexes need to be regenerated")
    try:
        redisearch.generate_indexes_if_necessary()
    except Exception:
        if not error_allowed:
            raise
    _log.info("Finish build successful")
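# Hedged example of the credential record start() writes when HMAC secrets are present;
# "my-key-id" and "my-hmac-key" are hypothetical stand-ins for the dc secrets.
import json

example_record = json.dumps({"id": "my-key-id", "key": "my-hmac-key", "root": True, "registration_time": 0}, separators=(",", ":"))
assert example_record == '{"id":"my-key-id","key":"my-hmac-key","root":true,"registration_time":0}'
# storage.put("KEYS/my-key-id", example_record.encode("utf-8")) would then persist it.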
def store_transaction_payloads(self) -> None:
    """Stores full transaction payloads for block"""
    for transaction in self.transactions:
        storage.put(f"PAYLOADS/{transaction.txn_id}", json.dumps(transaction.payload, separators=(",", ":")).encode("utf-8"))
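# Hedged sketch of the storage layout produced above: one object per transaction,
# keyed by txn_id. The id "abc123" and the payload are hypothetical sample data.
import json

txn_id = "abc123"
payload_key = f"PAYLOADS/{txn_id}"
payload_body = json.dumps({"hello": "world"}, separators=(",", ":")).encode("utf-8")
assert (payload_key, payload_body) == ("PAYLOADS/abc123", b'{"hello":"world"}')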
def store_full_txns(block_model: "l1_block_model.L1BlockModel") -> None:
    """
    Store the transactions object as a single file per block in storage.
    Also updates the indexes for each indexed transaction in Redisearch with block information.
    """
    _log.info("[TRANSACTION DAO] Putting transaction to storage")
    storage.put(f"{FOLDER}/{block_model.block_id}", block_model.export_as_full_transactions().encode("utf-8"))
    block_model.store_transaction_payloads()
    # Group the block's transactions by matching txn_type so each index is written in one batch with redisearch.put_many_documents
    txn_dict: Dict[str, Dict[str, Dict[str, Any]]] = {}
    txn_dict[redisearch.Indexes.transaction.value] = {}
    # O(N) loop where N = # of txns
    for txn in block_model.transactions:
        txn_dict[redisearch.Indexes.transaction.value][f"txn-{txn.txn_id}"] = {"block_id": txn.block_id}
        if txn.txn_type not in txn_dict:
            txn_dict[txn.txn_type] = {}
        txn_dict[txn.txn_type][txn.txn_id] = txn.export_as_search_index()
    # O(N) loop where N = # of txn types + 1
    for index, documents in txn_dict.items():
        try:
            redisearch.put_many_documents(index, documents, upsert=True)
        except redis.exceptions.ResponseError as e:
            # If the index doesn't exist, we don't care that we couldn't place the index (transaction type is probably deleted)
            if str(e) != "Unknown index name":
                raise
            else:
                _log.warning(f"Txn type {index} failed to index. (Transaction type may simply be deleted?) Ignoring")
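# Hedged illustration of the nested txn_dict shape built above (assuming
# redisearch.Indexes.transaction.value == "transaction"; ids and fields below are
# hypothetical): outer keys are index names, inner maps are document-id -> fields.
example_txn_dict = {
    "transaction": {"txn-abc123": {"block_id": "42"}},
    "my_txn_type": {"abc123": {"indexed_field": "value"}},
}
# Each pair then becomes one batched write, e.g.:
# redisearch.put_many_documents("my_txn_type", example_txn_dict["my_txn_type"], upsert=True)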
def store_full_txns(block_model: "l1_block_model.L1BlockModel") -> None:
    """
    Store the transactions object as a single file per block in storage.
    Also updates the indexes for each indexed transaction in ES with block information.
    """
    _log.info("[TRANSACTION DAO] Putting transaction to storage")
    storage.put(f"{FOLDER}/{block_model.block_id}", block_model.export_as_full_transactions().encode("utf-8"))
    elasticsearch.put_many_index_only(FOLDER, block_model.export_full_transactions_search_indexes())
def start() -> None:
    """Run by the webserver before it boots"""
    try:
        # New chains are often given HMAC keys when created. If found, we write them to storage.
        key_id = secrets.get_dc_secret("hmac-id")
        json_key = json.dumps({"id": key_id, "key": secrets.get_dc_secret("hmac-key"), "root": True, "registration_time": 0}, separators=(",", ":"))
        _log.info("HMAC keys were given to this chain on-boot. Writing them to storage.")
        storage.put(f"KEYS/{key_id}", json_key.encode("utf-8"))
    except exceptions.NotFound:
        _log.info("No HMAC keys were given to this chain on-boot. Skipping credential storage write.")
    try:
        btc.BTCClient("BTC_MAINNET").register_address()
        btc.BTCClient("BTC_TESTNET3").register_address()
    except Exception:
        _log.exception("!WARNING! Failed to register bitcoin address(es) with remote bitcoin RPC nodes")
    _log.info("Rehydrating the redis transaction list cache")
    try:
        transaction_type_dao.rehydrate_transaction_types()
    except Exception:
        if not error_allowed:
            raise
    _log.info("Finish build successful")
def set_funds(balance: Union[str, int, float]) -> None:
    storage.put("BROADCAST/CURRENT_FUNDS", str(balance).encode("utf-8"))


def set_last_watch_time() -> None:
    storage.put("BROADCAST/LAST_WATCH_TIME", str(int(time.time())).encode("utf-8"))


def set_last_broadcast_time() -> None:
    storage.put("BROADCAST/LAST_BROADCAST_TIME", str(int(time.time())).encode("utf-8"))


def set_last_block_number(block_id: str) -> None:
    storage.put("BROADCAST/LAST_BLOCK", block_id.encode("utf-8"))
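# Hedged sketch of matching readers for the setters above; these getter names are
# assumptions, not from the source. That storage.get returns the raw bytes written
# by storage.put is inferred from the migration check (storage.get(...) == b"1").
def get_last_watch_time() -> int:
    return int(storage.get("BROADCAST/LAST_WATCH_TIME"))


def get_last_block_number() -> str:
    return storage.get("BROADCAST/LAST_BLOCK").decode("utf-8")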
def test_put_calls_cache_with_correct_params(self):
    storage.put("thing", b"val")
    storage.redis.cache_put.assert_called_once_with("thing", b"val", None)


def test_put_calls_storage_put_with_params(self):
    storage.put("thing", b"val")
    storage.storage.put.assert_called_once_with("test", "thing", b"val")