Example #1
def start() -> None:
    """
    Run by the webserver before it boots
    """
    try:
        # Chains are given HMAC keys when created. If found, we write them to storage.
        key_id = secrets.get_dc_secret("hmac-id")
        _log.info(
            "HMAC keys were given to this chain on-boot. Writing them to storage."
        )
        storage.put_object_as_json(
            f"KEYS/{key_id}", {
                "id": key_id,
                "key": secrets.get_dc_secret("hmac-key"),
                "root": True,
                "registration_time": 0
            })
    except exceptions.NotFound:
        _log.info(
            "No HMAC keys were given to this chain on-boot. Skipping credential storage write."
        )

    _log.info("Checking if redisearch indexes need to be regenerated")
    redisearch.generate_indexes_if_necessary()

    _log.info("Finish pre-boot successful")
Example #2
def put_index_in_storage(folder: str, namespace: str, data_model: "model.Model") -> None:
    """Store data model content in storage, while creating an elastic search index
    Args:
        folder: the storage folder to use
        namespace: the storage namespace to use
        data_model: the data model to export
    """
    _set_elastic_search_client_if_necessary()
    _log.info(f"[DAO] SET -> {namespace}")

    indexable_object = data_model.export_as_search_index()
    full_object = data_model.export_as_at_rest()

    _log.info(f"[DAO] ES => INDEXABLE_OBJECT: {indexable_object}")

    _log.info(f"[DAO] ES => INDEXING OBJECT: namespace: {namespace}")
    _es_client.index(index=build_index(folder=folder), doc_type="_doc", id=namespace, body=json.dumps(indexable_object, separators=(",", ":")))

    _log.info(f"[DAO] storage => UPLOADING OBJECT: {full_object}")
    try:
        key = f"{folder.upper()}/{namespace}"
        try:
            # Smart contracts use a separate key for metadata
            if data_model.is_sc_model:  # noqa: T484 property might not exist, which is explicitly caught
                key += "/metadata.json"
        except Exception:
            pass
        storage.put_object_as_json(key, full_object)
    except Exception:
        # Storage upload failed; remove the just-created search index entry so the two stores don't drift apart
        _es_client.delete(index=build_index(folder=folder), doc_type="_doc", id=namespace)
Example #3
def process_receipt_v1(block_dto: Dict[str, Any]) -> None:
    if not block_dto:
        raise exceptions.ValidationException("block_dto missing")
    _log.info(
        f"[RECEIPT] Got receipt from L{block_dto['header']['level']}: {block_dto}"
    )
    block_model = cast("model.BlockModel",
                       None)  # This will always get defined, or it will raise
    level_received_from: int = block_dto["header"]["level"]
    if level_received_from == 2:
        block_model = l2_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 3:
        block_model = l3_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 4:
        block_model = l4_block_model.new_from_at_rest(block_dto)
    elif level_received_from == 5:
        block_model = l5_block_model.new_from_at_rest(block_dto)
    else:
        raise exceptions.InvalidNodeLevel("Unsupported level receipt")

    _log.info(f"Block model {block_model.__dict__}")
    l1_block_id_set = block_model.get_associated_l1_block_id()

    _log.info(
        f"Processing receipt for blocks {l1_block_id_set} from L{level_received_from}"
    )
    for l1_block_id in l1_block_id_set:
        # Check that the chain which sent this receipt is in our claims, and that this L1 block is accepting receipts for this level
        validations = matchmaking.get_claim_check(
            l1_block_id)["validations"][f"l{level_received_from}"]
        if (
                block_model.dc_id in validations
        ) and broadcast_functions.is_block_accepting_verifications_from_level(
                l1_block_id, level_received_from):
            _log.info(
                f"Verified that block {l1_block_id} was sent. Inserting receipt"
            )
            storage_location = broadcast_functions.verification_storage_location(
                l1_block_id, level_received_from, block_model.dc_id)
            storage.put_object_as_json(storage_location,
                                       block_model.export_as_at_rest())
            # Set new receipt for matchmaking claim check
            try:
                block_id = block_model.block_id
                proof = block_model.proof
                dc_id = block_model.dc_id
                matchmaking.add_receipt(l1_block_id, level_received_from,
                                        dc_id, block_id, proof)
            except Exception:
                _log.exception("matchmaking add_receipt failed!")
            # Update the broadcast system about this receipt
            broadcast_functions.set_receieved_verification_for_block_from_chain_sync(
                l1_block_id, level_received_from, block_model.dc_id)
        else:
            _log.warning(
                f"Chain {block_model.dc_id} (level {level_received_from}) returned a receipt that wasn't expected (possibly expired?) for block {l1_block_id}. Rejecting receipt"  # noqa: B950
            )
            raise exceptions.NotAcceptingVerifications(
                f"Not accepting verifications for block {l1_block_id} from {block_model.dc_id}"
            )
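For illustration, here is a minimal sketch of the dictionary shape that process_receipt_v1 inspects before dispatching to a level-specific parser; only header.level is shown, the remaining fields (consumed by the level-specific new_from_at_rest call) are elided, and the call site is hypothetical.

# Illustrative sketch only: the dispatcher reads block_dto["header"]["level"];
# levels 2-5 are accepted, anything else raises exceptions.InvalidNodeLevel
receipt_dto = {
    "header": {"level": 2},
    # ... remaining level-2 block fields consumed by l2_block_model.new_from_at_rest
}
process_receipt_v1(receipt_dto)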
Example #4
    def deploy_to_openfaas(self) -> None:
        """Deploy this job's smart contract to OpenFaaS and update the faas_spec

            Returns:
                None, or throws exceptions.InternalServerError
        """
        _log.info("Deploying to OpenFaaS cluster")
        spec = self.get_openfaas_spec()
        requests_method = requests.post if self.model.task_type == "create" else requests.put

        response = requests_method(
            f"{FAAS_GATEWAY}/system/functions",
            headers={"Authorization": faas.get_faas_auth()},
            json=spec)
        _log.info(f"Deployment status: {response.status_code}")
        if response.status_code not in [200, 202]:
            _log.info(f"OpenFaaS deploy failure: {response.status_code}")
            self.model.set_state(state=self.end_error_state,
                                 msg="Failed message state")
            raise exceptions.ContractException(
                "Contract function deployment failure")

        _log.info("Saving faas_spec.json to storage")
        if os.environ["STORAGE_TYPE"].lower() == "disk":
            os.setuid(1000)
        storage.put_object_as_json(
            key=f"SMARTCONTRACT/{self.model.id}/faas_spec.json", value=spec)
Example #5
def register_new_auth_key(smart_contract: bool = False,
                          auth_key: str = "",
                          auth_key_id: str = "",
                          nickname: str = "") -> Dict[str, Any]:
    """Register a new auth key for use with the chain
    Args:
        smart_contract: whether it should generate a key for a smart contract
        auth_key: (optional) specify an auth_key to use (must be in conjunction with auth_key_id)
        auth_key_id: (optional) specify an auth_key_id to use (must be in conjunction with auth_key)
        nickname: (optional) human-readable nickname to store with the new key
    Returns:
        Dictionary where 'id' is the new auth_key_id and 'key' is the new auth_key
    Raises:
        ValueError when only one of auth_key or auth_key_id is provided without the other
    """
    if (not auth_key) or (not auth_key_id):
        # Check that both are not specified (don't allow only auth_key or auth_key_id to be individually provided)
        if auth_key or auth_key_id:
            raise ValueError(
                "auth_key and auth_key_id must both be specified together if provided"
            )
        # Emulated do-while loop (Python has no native do-while)
        while True:
            auth_key_id = gen_auth_key_id(smart_contract)
            # Make sure this randomly generated key id doesn't already exist
            if not get_auth_key(auth_key_id, False):
                break
        auth_key = gen_auth_key()
    register = {
        "key": auth_key,
        "id": auth_key_id,
        "registration_time": int(time.time()),
        "nickname": nickname
    }
    storage.put_object_as_json(f"KEYS/{auth_key_id}", register)
    return register
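A hypothetical usage sketch (not part of the original example): either let the function generate a fresh key pair, or pass both auth_key and auth_key_id together.

# Generate a brand new key pair for a smart contract; the returned dict
# contains "id", "key", "registration_time" and "nickname"
new_key = register_new_auth_key(smart_contract=True, nickname="my-contract-key")

# Register an existing pair; supplying only one of the two raises ValueError
register_new_auth_key(auth_key="EXISTING_KEY", auth_key_id="EXISTING_ID")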
Example #6
def output_to_heap(contract_response: Dict[str, Any],
                   contract_id: str) -> None:
    for key, value in contract_response.items():
        _log.info(
            f"[SC-RETURN] Heap set request {key} : {value} to {contract_id}")
        storage.put_object_as_json(f"SMARTCONTRACT/{contract_id}/HEAP/{key}",
                                   value)
Example #7
def create_new_transaction_type(txn_type_model: transaction_type_model.TransactionTypeModel) -> None:
    """Save a new transaction type model"""
    txn_type_dto = txn_type_model.export_as_at_rest()
    _log.info(f"Adding transaction index for {txn_type_model.txn_type}")
    redisearch.create_transaction_index(txn_type_model.txn_type, txn_type_model.custom_indexes)
    _log.debug(f"Queuing for activation")
    redis.lpush_sync(QUEUED_TXN_TYPES, txn_type_model.txn_type)
    _log.debug(f"Adding the transaction type to storage")
    storage.put_object_as_json(f"{FOLDER}/{txn_type_model.txn_type}", txn_type_dto)
Example #8
def update_registration(new_data: dict) -> None:
    try:
        _log.info(f"[MATCHMAKING] Putting matchmaking config in storage: {new_data}")
        matchmaking_config = storage.get_json_from_object("MATCHMAKING_CONFIG.json")
        matchmaking_config.update(new_data)
        storage.put_object_as_json("MATCHMAKING_CONFIG.json", matchmaking_config)
        register()
    except Exception:
        raise exceptions.MatchmakingError("Failure updating matchmaking data")
Example #9
def update_api_key_v1(key_id: str, nickname: str) -> None:
    """Updates the nickname for an existing key
    Args:
        key_id: ID of api key to update
        nickname: new nickname for the given key
    """
    key = storage.get_json_from_object(f"KEYS/{key_id}")
    key["nickname"] = nickname
    storage.put_object_as_json(f"KEYS/{key_id}", key)
Example #10
async def save_unfinished_claim(block_id: str) -> None:
    """If a claim no longer exists in Dragon Net, but we don't have all the results,
       save its id for a potentially later date.

    Args:
        block_id: The block_id to save and remove from the broadcasting system
    """
    storage.put_object_as_json(f"{STORAGE_FOLDER}/UNFINISHED/{block_id}",
                               {"time": time.time()})
    await remove_block_from_broadcast_system_async(block_id)
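Because this is a coroutine, it must be awaited; a hypothetical call site from synchronous code could look like this (the block id value is illustrative).

import asyncio

asyncio.run(save_unfinished_claim("1234567"))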
Example #11
def insert_block(block: "model.BlockModel") -> None:
    """
    Insert new block into blockchain and ref to block's hash
    """
    #  Create ref to this block for the next block
    last_block_ref = {"block_id": block.block_id, "proof": block.proof}
    #  Upload stripped block
    elasticsearch.put_index_in_storage(FOLDER, block.block_id, block)

    #  Upload ref
    storage.put_object_as_json(f"{FOLDER}/{LAST_CLOSED_KEY}", last_block_ref)
Example #12
def store_l4_blocks(next_block_id_to_broadcast: str) -> None:
    # Gets stringified lists of L4 blocks from different L1s
    # Shape: ["{l4 block in transit}", "{l4 block in transit}"]
    _log.info("[L5] Storing L4 blocks")
    queue.check_and_recover_processing_if_necessary()
    l4_blocks = queue.get_new_l4_blocks()
    _log.info(f"[L5] Popped {len(l4_blocks)} L4 blocks off of queue")
    if l4_blocks:
        verified_records = verify_blocks(l4_blocks)
        storage.put_object_as_json(f"BROADCAST/TO_BROADCAST/{next_block_id_to_broadcast}/{str(uuid.uuid4())}", verified_records)
    # Successfully handled block popped from redis
    queue.clear_processing_queue()
Example #13
def insert_block(block: "model.BlockModel") -> None:
    """
    Insert new block into blockchain and ref to block's hash
    """
    #  Create ref to this block for the next block
    last_block_ref = {"block_id": block.block_id, "proof": block.proof}
    #  Upload stripped block
    redisearch.put_document(redisearch.Indexes.block.value, block.block_id,
                            block.export_as_search_index())
    storage.put_object_as_json(f"{FOLDER}/{block.block_id}",
                               block.export_as_at_rest())

    #  Upload ref
    storage.put_object_as_json(f"{FOLDER}/{LAST_CLOSED_KEY}", last_block_ref)
Example #14
def store_registered_transaction_type(
    transaction_type_model: transaction_type_model.TransactionTypeModel
) -> None:
    """
    Stores a new transaction type
    """
    _log.info("Uploading to datastore")
    storage.put_object_as_json(
        f"{FOLDER}/TYPES/{transaction_type_model.txn_type}",
        transaction_type_model.export_as_at_rest())
    redis.sadd_sync(TYPE_LIST_KEY, cast(
        str,
        transaction_type_model.txn_type))  # This should be defined when passed in
    _log.info("Successfully uploaded new transaction type to datastore")
Example #15
def save_interchain_auth_key(interchain_dcid: str, auth_key: str) -> bool:
    """Register a new interchain auth key. !This will overwrite any existing interchain key for this dcid!
    Args:
        interchain_dcid: chain id of the interchain sharing this key
        auth_key: auth_key to add
    Returns:
        Boolean if successful
    """
    try:
        # Add the new key
        register = {"key": auth_key, "registration_time": int(time.time())}
        storage.put_object_as_json(f"KEYS/INTERCHAIN/{interchain_dcid}",
                                   register)
        return True
    except Exception:
        return False
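Since failures are reported through the boolean return value rather than an exception, a caller would typically check it; a hypothetical sketch with illustrative values:

# Hypothetical caller; the dcid and key values are placeholders
if not save_interchain_auth_key("some_interchain_dcid", "shared_auth_key"):
    _log.error("Could not persist interchain auth key")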
Example #16
def activate_transaction_types_if_necessary(block_id: str) -> None:
    """Activate transaction type(s) by setting them to active at a certain block number (for index regeneration purposes)
    Args:
        block_id: the current block id where the transaction types are being activated (if they exist)
    """
    # Get all the queued transaction types
    p = redis.pipeline_sync(transaction=True)
    p.lrange(QUEUED_TXN_TYPES, 0, -1)
    p.delete(QUEUED_TXN_TYPES)
    results, _ = p.execute()
    for txn_type in results:
        try:
            txn_type_model = get_registered_transaction_type(txn_type.decode("utf8"))
            txn_type_model.active_since_block = block_id
            # Save the transaction type state
            storage.put_object_as_json(f"{FOLDER}/{txn_type_model.txn_type}", txn_type_model.export_as_at_rest())
        except exceptions.NotFound:
            pass  # txn_type was probably deleted before activating. Simply ignore it
Example #17
def set_default_interchain_client(blockchain: str,
                                  name: str) -> "model.InterchainModel":
    """Set the default interchain model for this chain
    Args:
        blockchain: the blockchain of the desired client (i.e. bitcoin, ethereum, etc)
        name: the name (id) of the network to set as default (user defined on the creation of the interchain)
    Returns:
        The client for the interchain which was set as default
    Raises:
        exceptions.NotFound: When trying to set a default to an interchain that doesn't exist on this chain
    """
    # Make sure the specified interchain exists before setting as default
    client = get_interchain_client(blockchain, name)
    storage.put_object_as_json(f"{FOLDER}/default", {
        "version": "1",
        "blockchain": blockchain,
        "name": name
    })
    return client
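A hypothetical usage sketch, following the blockchain/name semantics described in the docstring (values are illustrative):

# Make a previously created Ethereum interchain named "mainnet" the chain's default;
# raises exceptions.NotFound if no such interchain exists on this chain
default_client = set_default_interchain_client("ethereum", "mainnet")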
Example #18
def finalize_block(block: l5_block_model.L5BlockModel,
                   last_confirmed_block: Dict[str, Any],
                   confirmed_txn_hash: str) -> None:
    _log.info(f"[L5] Block {block.block_id} confirmed")
    if last_confirmed_block["proof"].get("proof"):
        block.prev_proof = last_confirmed_block["proof"]["proof"]

    _log.info("[L5] Signing block")
    block.transaction_hash = [confirmed_txn_hash]
    block.proof = keys.get_my_keys().sign_block(block)

    _log.info("[L5] Storing new block and moving pointers")
    storage.put_object_as_json(f"BLOCK/{block.block_id}",
                               block.export_as_at_rest())
    # In the future if we change/add indexes to an L5 block, it may need to be re-indexed here.
    # For now, no re-indexing is necessary, only a storage update
    set_last_confirmed_block(block)

    # Notify L1s that contributed to L5 block
    broadcast.dispatch(block)
Example #19
def broadcast_to_public_chain(l5_block: l5_block_model.L5BlockModel) -> None:
    _log.info("[L5] Preparing to broadcast")
    # Hash the block and publish the block to a public network
    public_hash = keys.get_my_keys().hash_l5_for_public_broadcast(l5_block)
    transaction_hash = _interchain_client.publish_l5_hash_to_public_network(
        public_hash)
    _log.info(
        "[L5] After Publish to public network, setting new broadcast time")
    _log.info(f"[L5] transaction_hash {transaction_hash}")

    # Append transaction hash to list, add network and last block sent at
    l5_block.transaction_hash += [transaction_hash]
    l5_block.block_last_sent_at = _interchain_client.get_current_block()
    l5_block.network = INTERCHAIN_NETWORK

    storage_key = f"BLOCK/{l5_block.block_id}"
    _log.info(f"[L5] Adding to storage at {storage_key} and creating index")
    storage.put_object_as_json(storage_key, l5_block.export_as_at_rest())
    redisearch.put_document(redisearch.Indexes.block.value, l5_block.block_id,
                            l5_block.export_as_search_index())
Example #20
def set_receipt_data(key: str, data_model: "model.BlockModel", l1_block_id: str, level: int) -> None:
    """Store receipt as JSON content in storage, while updating ES metadata to reflect relevant verifications
    Args:
        key: String of the storage key for the receipt block
        data_model: block data model which you want to store as JSON in storage.
        l1_block_id: the level 1 block id that this receipt corresponds to
        level: the level of the received block
    """
    _set_elastic_search_client_if_necessary()
    full_object = data_model.export_as_at_rest()

    # Update the block's entry in Elasticsearch, incrementing its verification count for this level via an update script
    upscript = {"script": f"ctx._source.l{level}_verifications += 1"}
    count = 0
    elastic_success = False
    while count < ES_RETRY_COUNT:
        try:
            elastic_response = _es_client.update(index=f"{INTERNAL_ID}-block", doc_type="_doc", id=l1_block_id, body=upscript)
            if int(elastic_response["_shards"]["successful"]) > 0:
                _log.info("[DAO] Successfully indexed block verifications")
                elastic_success = True
                break
        except Exception:
            count += 1
    if not elastic_success:
        _log.error("[DAO] ES Indexing Failed, abort storage upload.")
        raise exceptions.ElasticSearchFailure("Elasticsearch Index Failure")
    # map through each transaction in l2 receipts to update metadata rejections if relevant
    if level == 2:
        bulk_rejections = {}
        upscript = {"script": "ctx._source.l2_rejections += 1"}
        data_model = cast("l2_block_model.L2BlockModel", data_model)  # if L2, we know this is an L2 block model
        for txn in data_model.validations_dict:
            if not data_model.validations_dict[txn]:
                bulk_rejections.update({txn: upscript})
        if len(bulk_rejections.keys()) != 0:
            _log.info("[DAO] Level 2 block had rejections, bulk updating rejections for transactions")
            bulk_index = prepare_bulk(bulk_rejections, index=f"{data_model.get_associated_l1_dcid()}-transaction", bulk_op="update")
            elasticsearch.helpers.bulk(_es_client, bulk_index)
    _log.info("[DAO] STORAGE => Uploading receipt to storage")
    storage.put_object_as_json(key, full_object)
Example #21
def save_interchain_client(interchain_client: "model.InterchainModel") -> None:
    """Save an interchain model to storage"""
    storage.put_object_as_json(
        f"{FOLDER}/{interchain_client.blockchain}/{interchain_client.name}",
        interchain_client.export_as_at_rest())
Example #22
def save_api_key(api_key: api_key_model.APIKeyModel) -> None:
    """Save an api key model to storage"""
    storage.put_object_as_json(
        f"{INTERCHAIN_FOLDER if api_key.interchain else FOLDER}/{api_key.key_id}",
        api_key.export_as_at_rest())
Example #23
def set_last_confirmed_block(l5_block: l5_block_model.L5BlockModel) -> None:
    storage.put_object_as_json(
        "BROADCAST/LAST_CONFIRMED_BLOCK", {
            "block_id": l5_block.block_id,
            "proof": l5_block.export_as_at_rest()["proof"]
        })
Example #24
    def test_put_object_as_json_calls_put_with_correct_params(self):
        storage.put = MagicMock()
        storage.put_object_as_json("key", {})
        storage.put.assert_called_once_with("key", b"{}", None, True)
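A hypothetical companion test (not from the source), assuming put_object_as_json uses the compact separators=(",", ":") style seen in Example 2 and passes the same trailing default arguments asserted above:

    def test_put_object_as_json_serializes_payload(self):
        # Assumes compact JSON encoding and the same default trailing arguments as the test above
        storage.put = MagicMock()
        storage.put_object_as_json("key", {"a": 1})
        storage.put.assert_called_once_with("key", b'{"a":1}', None, True)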
Example #25
    def save(self) -> None:
        """Active record-style save for smart contract state"""
        storage.put_object_as_json(f"SMARTCONTRACT/{self.id}/metadata.json",
                                   self.export_as_at_rest())