Example #1
def perform_api_key_migration_v1_if_necessary() -> None:
    """Checks if an api key migration needs to be performed, and does so if necessary"""
    try:
        if storage.get(f"{FOLDER}/{MIGRATION_V1}") == b"1":
            # Migration was previously performed. No action necessary
            return
    except exceptions.NotFound:
        pass
    _log.info("Api key migration required. Performing now")
    valid_keys = storage.list_objects(prefix=FOLDER)
    regular_keys = [key for key in valid_keys if not key.startswith("KEYS/INTERCHAIN/")]
    interchain_keys = [key for key in valid_keys if key.startswith("KEYS/INTERCHAIN/")]
    for key in regular_keys:
        _log.info(f"Migrating {key}")
        api_key = api_key_model.new_from_legacy(storage.get_json_from_object(key), interchain_dcid="")
        save_api_key(api_key)
    for key in interchain_keys:
        _log.info(f"Migrating interchain key {key}")
        interchain_dcid = key[len("KEYS/INTERCHAIN/"):]  # Get the interchain dcid from the key
        api_key = api_key_model.new_from_legacy(storage.get_json_from_object(key), interchain_dcid=interchain_dcid)
        save_api_key(api_key)
    # Save migration marker once complete
    storage.put(f"{FOLDER}/{MIGRATION_V1}", b"1")
    _log.info("Api key migration v1 complete")
Example #2
def search(
    folder: str,
    query: Optional[dict] = None,
    q: Optional[str] = None,
    get_all: bool = False,
    sort: Optional[str] = None,
    offset: Optional[int] = None,
    limit: Optional[int] = None,
    should_parse: bool = True,
) -> "ESSearch":
    """invoke queries on elastic search indexes built with #set. Return the full storage stored object
    Args:
        query: Elastic search query. The search definition using the ES Query DSL.
        q: Query in the Lucene query string syntax
    Returns:
        storage objects matching search query
    """
    hits_pages = get_index_only(folder, query, q, get_all, sort, offset, limit)
    _log.info(f"Pages: {hits_pages}")
    storage_objects = []

    for hit in hits_pages["hits"]:
        storage_id = hit["_source"][S3_OBJECT_ID]  # get the id
        # get the folder
        storage_folder = hit["_source"][S3_OBJECT_FOLDER]
        storage_object = storage.get_json_from_object(f"{storage_folder}/{storage_id}")  # pull the object from storage
        storage_objects.append(storage_object)  # add to the result set

        #  Parse transactions if should_parse is flagged, and the transactions array exists
        if storage_object["dcrn"] == schema.DCRN.Block_L1_At_Rest.value and should_parse:
            for index, transaction in enumerate(storage_object["transactions"]):
                storage_object["transactions"][index] = json.loads(transaction)

    return {"results": storage_objects, "total": hits_pages["total"]}
Example #3
def check_confirmations() -> None:
    last_confirmed_block = get_last_confirmed_block()
    last_confirmed_block_number = last_confirmed_block["block_id"]
    last_created_block = get_last_block_number()

    _log.info(
        f"[L5] Last confirmed block is {last_confirmed_block_number}, last created block is {last_created_block}"
    )

    if int(last_confirmed_block_number) < int(last_created_block):
        # Check for confirmations
        next_block_to_confirm = int(last_confirmed_block_number) + 1
        block_key = f"BLOCK/{next_block_to_confirm}"
        block = l5_block_model.new_from_at_rest(
            storage.get_json_from_object(block_key))

        for txn_hash in list(block.transaction_hash):  # Iterate over a copy; hashes may be removed below
            try:
                if _interchain_client.is_transaction_confirmed(txn_hash):
                    finalize_block(block, last_confirmed_block, txn_hash)
                    # Stop execution here!
                    return
            except exceptions.RPCTransactionNotFound:
                # If the transaction is not found, it may have been dropped, so remove it from the block
                block.transaction_hash.remove(txn_hash)

        # If execution did not stop, the block is not confirmed.
        if _interchain_client.should_retry_broadcast(block.block_last_sent_at):
            broadcast_to_public_chain(block)
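The loop above iterates over a copy of block.transaction_hash because the body removes entries from that same list; removing from a list while iterating it directly silently skips the element that slides into the freed slot. A self-contained demonstration:

hashes = ["a", "b", "c"]
for h in hashes:        # Iterating the list that is being mutated
    hashes.remove(h)    # After removing "a", "b" slides into its slot and is skipped
print(hashes)           # ['b'] -- one element was never visited

hashes = ["a", "b", "c"]
for h in list(hashes):  # Iterating a copy visits every element
    hashes.remove(h)
print(hashes)           # []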
Example #4
def _generate_l5_verification_indexes() -> None:
    client = _get_redisearch_index_client(Indexes.verification.value)
    try:
        client.drop_index()  # Always rebuild these indexes from scratch; ignore a missing index
    except redis.exceptions.ResponseError:
        pass
    try:
        client.create_index(
            [
                redisearch.NumericField("block_id", sortable=True),
                redisearch.NumericField("prev_id", sortable=True),
                redisearch.NumericField("timestamp", sortable=True),
                redisearch.TagField("dc_id"),
            ]
        )
    except redis.exceptions.ResponseError as e:
        if not str(e).startswith("Index already exists"):  # We don't care if index already exists
            raise
    _log.info("Listing all blocks in storage")
    block_paths = storage.list_objects("BLOCK/")
    pattern = re.compile(r"BLOCK\/([0-9]+)-([Ll])5(.*)$")
    for block_path in block_paths:
        if LEVEL == "1" and BROADCAST_ENABLED and re.search(pattern, block_path):
            if not client.redis.sismember(L5_BLOCK_MIGRATION_KEY, block_path):
                raw_block = storage.get_json_from_object(block_path)
                block = l5_block_model.new_from_at_rest(raw_block)
                put_document(Indexes.verification.value, block_path.split("/")[1], block.export_as_search_index())
                client.redis.sadd(L5_NODES, block.dc_id)
                client.redis.sadd(L5_BLOCK_MIGRATION_KEY, block_path)
            else:
                _log.info(f"Skipping already indexed L5 block {block_path}")
Example #5
def _generate_block_indexes_from_scratch() -> None:
    client = _get_redisearch_index_client(Indexes.block.value)
    client.create_index([
        redisearch.NumericField("block_id", sortable=True),
        redisearch.NumericField("prev_id", sortable=True),
        redisearch.NumericField("timestamp", sortable=True),
    ])
    _log.info("Listing all blocks in storage")
    block_paths = storage.list_objects("BLOCK/")
    pattern = re.compile(r"BLOCK\/[0-9]+$")
    for block_path in block_paths:
        if re.search(pattern, block_path):
            _log.info(f"Adding index for {block_path}")
            raw_block = storage.get_json_from_object(block_path)
            block: "model.BlockModel"
            if LEVEL == "1":
                block = l1_block_model.new_from_stripped_block(raw_block)
            elif LEVEL == "2":
                block = l2_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "3":
                block = l3_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "4":
                block = l4_block_model.new_from_at_rest(raw_block)
            elif LEVEL == "5":
                block = l5_block_model.new_from_at_rest(raw_block)
            else:
                raise RuntimeError(f"Unsupported node level {LEVEL}")
            put_document(Indexes.block.value, block.block_id, block.export_as_search_index())
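An alternative to the if/elif ladder above (and the one in Example #28) is a dispatch table, which keeps the level-to-parser mapping in one place and makes the unsupported-level case explicit; a sketch assuming the same module names:

_BLOCK_PARSERS = {
    "1": l1_block_model.new_from_stripped_block,
    "2": l2_block_model.new_from_at_rest,
    "3": l3_block_model.new_from_at_rest,
    "4": l4_block_model.new_from_at_rest,
    "5": l5_block_model.new_from_at_rest,
}

try:
    block = _BLOCK_PARSERS[LEVEL](raw_block)  # KeyError doubles as the unsupported-level signal
except KeyError:
    raise RuntimeError(f"Unsupported node level {LEVEL}")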
Example #6
def get_broadcast_dto(higher_level: int, block_id: str) -> Dict[str, Any]:
    """Get the broadcast dto for a block to a certain level
    Args:
        higher_level: (2-5)
        block_id: block_id used to locate block in storage
    Returns:
        Broadcast DTO of the requested block
    """
    l1_block = storage.get_json_from_object(f"{FOLDER}/{block_id}")
    if higher_level == 2:
        return l1_block_model.export_broadcast_dto(l1_block)
    else:
        required_verification_count = dragonnet_config.DRAGONNET_CONFIG[f"l{higher_level - 1}"]["nodesRequired"]
        verification_blocks = get_verifications_for_l1_block(block_id, higher_level - 1)
        if len(verification_blocks) < required_verification_count:
            raise exceptions.NotEnoughVerifications(
                f"This chain requires {required_verification_count} level {higher_level - 1} verifications, but only {len(verification_blocks)} were found. (Probably just need to wait)"  # noqa: B950
            )
        if higher_level == 3:
            return l2_block_model.export_broadcast_dto(verification_blocks, l1_block)
        elif higher_level == 4:
            return l3_block_model.export_broadcast_dto(verification_blocks, l1_block)
        elif higher_level == 5:
            return l4_block_model.export_broadcast_dto(verification_blocks)
        else:
            raise exceptions.InvalidNodeLevel(
                f"Level {higher_level} is not valid for getting a broadcast DTO (Only allowed 2-5)"
            )
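A hedged usage sketch; the block id is hypothetical, and the caller is assumed to treat NotEnoughVerifications as the transient condition the error message above describes:

try:
    dto = get_broadcast_dto(higher_level=3, block_id="24626984")
except exceptions.NotEnoughVerifications:
    dto = None  # Not enough L2 verifications yet; retry on the next broadcast pass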
Example #7
def check_confirmations() -> None:
    last_confirmed_block = get_last_confirmed_block()
    last_confirmed_block_number = last_confirmed_block["block_id"]
    last_created_block = get_last_block_number()

    _log.info(f"[L5] Last confirmed block is {last_confirmed_block_number}, last created block is {last_created_block}")

    if int(last_confirmed_block_number) < int(last_created_block):
        # Check for confirmations
        next_block_to_confirm = int(last_confirmed_block_number) + 1
        block_key = f"BLOCK/{next_block_to_confirm}"
        block = l5_block_model.new_from_at_rest(storage.get_json_from_object(block_key))

        for txn_hash in list(block.transaction_hash):  # Iterate over a copy; hashes may be removed below
            confirmed = INTERCHAIN.is_transaction_confirmed(txn_hash)

            # If this call returned the transaction hash, the transaction was dropped, so we remove it
            # from the block; if it returned True, the transaction was confirmed.
            # When a broadcast retry occurs, the removed hashes will also be removed in storage.
            if isinstance(confirmed, str):
                block.transaction_hash.remove(txn_hash)
            elif confirmed:
                finalize_block(block, last_confirmed_block, txn_hash)

                # Stop execution here!
                return

        # If execution did not stop in the above for loop, we know that the block is not confirmed.
        retry_broadcast_if_necessary(block)
Example #8
def get_registered_transaction_type(transaction_type: str) -> transaction_type_model.TransactionTypeModel:
    """
    Searches for a registered transaction type by name
    :param transaction_type
    """
    _log.info(f"Attempting to get type {transaction_type}")
    result = storage.get_json_from_object(f"{FOLDER}/{transaction_type}")
    return transaction_type_model.new_from_at_rest(result)
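Elsewhere in these examples storage.get_json_from_object raises exceptions.NotFound for missing objects, so callers would guard this lookup the same way; a sketch with a hypothetical type name:

try:
    txn_type = get_registered_transaction_type("banana_coin")  # "banana_coin" is illustrative
except exceptions.NotFound:
    _log.info("banana_coin is not a registered transaction type")
    txn_type = None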
Example #9
def update_api_key_v1(key_id: str, nickname: str) -> None:
    """Updates the nickname for an existing key
    Args:
        key_id: ID of api key to update
        nickname: new nickname for the given key
    """
    key = storage.get_json_from_object(f"KEYS/{key_id}")
    key["nickname"] = nickname
    storage.put_object_as_json(f"KEYS/{key_id}", key)
Example #10
def list_registered_transaction_types_v1() -> Dict[str, Any]:
    """
    Lists out the current registered transaction types
    """
    _log.info("Listing out existing transaction types")
    transaction_types = redis.smembers_sync(transaction_type_dao.TYPE_LIST_KEY)
    transaction_type_list = [storage.get_json_from_object(f"{transaction_type_dao.FOLDER}/TYPES/{txn_type}") for txn_type in transaction_types]

    return {"transaction_types": transaction_type_list}
Example #11
def update_registration(new_data: dict) -> None:
    try:
        _log.info(f"[MATCHMAKING] Putting matchmaking config in storage: {new_data}")
        matchmaking_config = storage.get_json_from_object("MATCHMAKING_CONFIG.json")
        matchmaking_config.update(new_data)
        storage.put_object_as_json("MATCHMAKING_CONFIG.json", matchmaking_config)
        register()
    except Exception as e:
        raise exceptions.MatchmakingError("Failure updating matchmaking data") from e
Example #12
def get_interchain_client(blockchain: str, name: str) -> "model.InterchainModel":
    """Get a specific interchain client
    Args:
        blockchain: the blockchain of the desired client (i.e. bitcoin, ethereum, etc)
        name: the name (id) of the network to get (user defined on the creation of the interchain)
    Raises:
        exceptions.NotFound: When the requested client can't be found
    """
    if blockchain == "bitcoin":
        return btc.new_from_at_rest(
            storage.get_json_from_object(f"{FOLDER}/bitcoin/{name}"))
    elif blockchain == "ethereum":
        return eth.new_from_at_rest(
            storage.get_json_from_object(f"{FOLDER}/ethereum/{name}"))
    else:
        raise exceptions.NotFound(
            f"Blockchain network {blockchain} is not supported")
Example #13
def get_last_block_proof() -> Dict[str, str]:
    """Return the last closed block's ID and hash
    Returns:
        Result of last closed block lookup (empty dictionary if not found)
    """
    try:
        return storage.get_json_from_object(f"{FOLDER}/{LAST_CLOSED_KEY}")
    except exceptions.NotFound:
        return {}
Example #14
def get_block_by_id_v1(block_id: str, parse: bool = False) -> Dict[str, Any]:
    """Searches for a block by a specific block ID
    Args:
        block_id: The block id to get
        parse: whether or not to parse the result automatically
    """
    raw_block = storage.get_json_from_object(f"BLOCK/{block_id}")
    if parse and raw_block["dcrn"] == schema.DCRN.Block_L1_At_Rest.value:
        for index, transaction in enumerate(raw_block["transactions"]):
            raw_block["transactions"][index] = json.loads(transaction)
    return raw_block
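A usage sketch for the parse flag (the block id is hypothetical): with parse=True an L1 block's transactions come back as dicts, while by default they remain the JSON strings exactly as stored.

block = get_block_by_id_v1("24626984", parse=True)
if block["dcrn"] == schema.DCRN.Block_L1_At_Rest.value:
    first_txn = block["transactions"][0]  # Already a dict thanks to parse=True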
Example #15
def get_auth_key(auth_key_id: str, interchain: bool) -> Optional[str]:
    """Retrieve the auth key corresponding to a key id
    Args:
        auth_key_id: The key id to grab (if interchain, this is the interchain dcid)
        interchain: boolean whether the key to get is an interchain key or not
    Returns:
        The base64 encoded auth key string corresponding to the id (None if not found)
    """
    response = None
    try:
        if interchain:
            response = storage.get_json_from_object(f"KEYS/INTERCHAIN/{auth_key_id}")
        else:
            response = storage.get_json_from_object(f"KEYS/{auth_key_id}")
    except exceptions.NotFound:
        pass
    if response:
        return response.get("key")
    return None
Example #16
def get_verifications_for_l1_block(block_id: str, level: int) -> List[Dict[str, Any]]:
    try:
        keys = list(broadcast_functions.get_receieved_verifications_for_block_and_level_sync(block_id, level))
        if len(keys) != 0:
            for i in range(len(keys)):
                keys[i] = f"{FOLDER}/{block_id}-l{level}-{keys[i].decode('utf8')}"
            return [storage.get_json_from_object(key) for key in keys]
    except Exception:
        pass
    # Only fall back to listing from storage if we don't have verifications already saved in redis
    prefix = f"{FOLDER}/{block_id}-l{level}"
    keys = storage.list_objects(prefix)
    _log.info(f"Verification keys by prefix {prefix}: {keys}")
    return [] if len(keys) == 0 else [storage.get_json_from_object(key) for key in keys]
Example #17
def get_verifications_for_l1_block(block_id: str, level: int) -> List[Dict[str, Any]]:
    try:
        keys = list(broadcast_functions.get_receieved_verifications_for_block_and_level_sync(block_id, level))
        if len(keys) != 0:
            for i in range(len(keys)):
                keys[i] = f"{FOLDER}/{block_id}-l{level}-{keys[i]}"
            return [storage.get_json_from_object(x) for x in keys]
    except Exception:
        _log.exception(
            "Error getting verifications from cached list. Falling back to direct storage list"
        )
    # Only fall back to listing from storage if we don't have verifications already saved in redis
    prefix = f"{FOLDER}/{block_id}-l{level}"
    keys = storage.list_objects(prefix)
    _log.info(f"Verification keys by prefix {prefix}: {keys}")
    return [] if len(keys) == 0 else [storage.get_json_from_object(key) for key in keys]
Example #18
def _generate_smart_contract_indexes() -> None:
    delete_index(Indexes.smartcontract.value)  # Always generate smart contract indexes from scratch by dropping existing ones
    client = _get_redisearch_index_client(Indexes.smartcontract.value)
    client.create_index([redisearch.TagField("sc_name")])
    # Find what smart contracts exist in storage
    _log.info("Listing all smart contracts in storage")
    sc_object_paths = storage.list_objects("SMARTCONTRACT/")
    pattern = re.compile(r"SMARTCONTRACT\/.{36}\/metadata\.json$")
    for sc in sc_object_paths:
        if re.search(pattern, sc):
            sc_model = smart_contract_model.new_from_at_rest(storage.get_json_from_object(sc))
            _log.info(f"Adding index for smart contract {sc_model.id} ({sc_model.txn_type})")
            put_document(Indexes.smartcontract.value, sc_model.id, sc_model.export_as_search_index())
Example #19
def list_api_keys(include_interchain: bool) -> List[api_key_model.APIKeyModel]:
    """Retrieve a list of api keys
    Args:
        include_interchain: whether or not to include interchain api keys
    Returns:
        List of api key models
    """
    # Get keys from storage, excluding migration marker and interchain keys
    return_list = []
    for key in storage.list_objects(prefix=FOLDER):
        if (MIGRATION_V1 in key) or (key.startswith("KEYS/INTERCHAIN") and not include_interchain):
            continue
        return_list.append(api_key_model.new_from_at_rest(storage.get_json_from_object(key)))
    return return_list
Example #20
def get_default_interchain_client() -> "model.InterchainModel":
    """Get the interchain model which has been set as the default for this chain
    Returns:
        Instantiated InterchainModel
    Raises:
        exceptions.NotFound: When default has not been set, or set default cannot be found
        NotImplementedError: When the saved default is a bad version
    """
    default_dto = storage.get_json_from_object(f"{FOLDER}/default")
    if default_dto.get("version") == "1":
        return get_interchain_client(default_dto.get("blockchain"),
                                     default_dto.get("name"))
    else:
        raise NotImplementedError(
            f"Default dto error. Version {default_dto.get('version')} not supported"
        )
Example #21
def list_contracts_v1() -> Dict[str, List[Dict[str, Any]]]:
    """Function used by the smartcontract endpoint with method GET.
        Returns a list of all smart contracts.
    Returns:
        Metadata for every smart contract on this chain.
    """
    sc_list = smart_contract_dao.list_all_contract_ids()
    sc_metadata = []
    for sc_id in sc_list:
        try:
            sc_metadata.append(
                storage.get_json_from_object(
                    f"{smart_contract_dao.FOLDER}/{sc_id}/metadata.json"))
        except exceptions.NotFound:  # If smart contract metadata is not found, simply ignore it and don't add it to the list
            pass
    return {"smart_contracts": sc_metadata}
Example #22
def get_api_key_v1(key_id: str) -> Dict[str, Any]:
    """Returns the api key information (without the actual key itself) for a key id
    Args:
        key_id: ID of api key to get
    Returns:
        API key ID and registration timestamp (if any)
    """
    if key_id.startswith("SC_") or key_id.startswith(
            "WEB_") or key_id.startswith("INTERCHAIN"):
        raise exceptions.NotFound(f"api key with ID {key_id} not found")
    key = storage.get_json_from_object(f"KEYS/{key_id}")
    return {
        "id": str(key["id"]),
        "registration_time": int(key["registration_time"]),
        "nickname": str(key.get("nickname") or "")
    }
Example #23
def get_api_key(key_id: str, interchain: bool) -> api_key_model.APIKeyModel:
    """Get an api key from storage
    Args:
        key_id: The key id to fetch (public chain id if interchain)
        interchain: Whether or not this is an interchain key
    """
    # Explicitly don't allow permission keys with slashes (may be malicious)
    if "/" in key_id:
        raise exceptions.NotFound
    folder = INTERCHAIN_FOLDER if interchain else FOLDER
    model = api_key_model.new_from_at_rest(storage.get_json_from_object(f"{folder}/{key_id}"))
    if model.interchain != interchain:  # Double check the interchain value of the key is what we expect; otherwise panic
        raise RuntimeError(
            f"Bad interchain key {key_id} found. Expected interchain: {interchain} but got {model.interchain}"
        )
    return model
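The slash guard matters because key_id is interpolated directly into a storage path; an id containing "/" (for example "INTERCHAIN/abc" against the non-interchain folder) could otherwise address objects outside the intended folder. A minimal illustration of what the check rejects (the candidate ids are hypothetical):

for candidate in ("goodkey", "INTERCHAIN/abc", "../KEYS/other"):
    print(candidate, "rejected" if "/" in candidate else "allowed")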
Example #24
def list_interchain_clients(blockchain: str) -> List["model.InterchainModel"]:
    """Get all of the interchain clients for a specific blockchain type
    Args:
        blockchain: The blockchain of the desired clients to get
    Returns:
        List of instantiated interchain clients for the specified blockchain
    """
    from_rest_function: Any = None
    if blockchain == "bitcoin":
        from_rest_function = btc.new_from_at_rest
    elif blockchain == "ethereum":
        from_rest_function = eth.new_from_at_rest
    else:
        raise exceptions.NotFound(f"Blockchain network {blockchain} is not supported")

    return [
        from_rest_function(storage.get_json_from_object(x))
        for x in storage.list_objects(f"{FOLDER}/{blockchain}/")
    ]
Example #25
def get_pending_l4_blocks(block_id: str) -> List[str]:
    all_waiting_verification_keys = storage.list_objects(f"BROADCAST/TO_BROADCAST/{block_id}")

    l4_blocks = []
    for key in all_waiting_verification_keys:
        record_list = storage.get_json_from_object(key)

        for record in record_list:
            item = {
                "l1_dc_id": record["header"]["l1_dc_id"],
                "l1_block_id": record["header"]["l1_block_id"],
                "l4_dc_id": record["header"]["dc_id"],
                "l4_block_id": record["header"]["block_id"],
                "l4_proof": record["proof"]["proof"],
            }
            if record.get("is_invalid"):
                item["is_invalid"] = record.get("is_invalid")
            l4_blocks.append(json.dumps(item, separators=(",", ":")))

    return l4_blocks
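Each returned entry is a compact JSON string (note the separators argument), so a consumer decodes it back with json.loads; a usage sketch with a hypothetical block id:

for raw in get_pending_l4_blocks("1234"):
    record = json.loads(raw)
    print(record["l4_dc_id"], record["l4_block_id"], record.get("is_invalid", False))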
Example #26
def query_interchain_broadcasts_v1(block_id: str) -> List[Any]:
    """Return the subsequent broadcasts to other L5 networks"""
    _log.info(f"Getting subsequent L5 verifications for {block_id}")
    results = []
    l5_block = None
    l5_verifications = _get_verification_records(block_id, 5)
    if len(l5_verifications) > 0:
        l5_block = cast(List[Any], l5_verifications)[0]
        timestamp = l5_block["header"]["timestamp"]
        dc_id = l5_block["header"]["dc_id"]
        client = redisearch._get_redisearch_index_client(redisearch.Indexes.verification.value)
        l5_nodes = client.redis.smembers(redisearch.L5_NODES)
        results = [
            _query_l5_verification(l5_dc_id.decode("utf-8"), timestamp)
            for l5_dc_id in l5_nodes if l5_dc_id.decode("utf-8") != dc_id
        ]
    return ([l5_block] if l5_block else []) + [
        storage.get_json_from_object(f"BLOCK/{x}")
        for x in results if x is not None
    ]
Example #27
def get_api_key_list_v1() -> Dict[str, List[Dict[str, Any]]]:
    """
    Gets the list of api key IDs
    Returns:
        List of API keys
    """
    keys = storage.list_objects(prefix=FOLDER)
    valid_keys = [key for key in keys if not key.startswith(("KEYS/WEB_", "KEYS/SC_", "KEYS/INTERCHAIN"))]
    returned_keys = []
    for key in valid_keys:
        resp = storage.get_json_from_object(key)
        returned_keys.append({
            "id": str(resp["id"]),
            "registration_time": int(resp["registration_time"]),
            "nickname": str(resp.get("nickname") or ""),
        })
    return {"keys": returned_keys}
Example #28
def _generate_block_indexes() -> None:
    client = _get_redisearch_index_client(Indexes.block.value)
    try:
        client.create_index([
            redisearch.NumericField("block_id", sortable=True),
            redisearch.NumericField("prev_id", sortable=True),
            redisearch.NumericField("timestamp", sortable=True),
        ])
    except redis.exceptions.ResponseError as e:
        if not str(e).startswith("Index already exists"
                                 ):  # We don't care if index already exists
            raise
    _log.info("Listing all blocks in storage")
    block_paths = storage.list_objects("BLOCK/")
    pattern = re.compile(r"BLOCK\/[0-9]+$")
    for block_path in block_paths:
        if re.search(pattern, block_path):
            # do a check to see if this block was already marked as indexed
            if not client.redis.sismember(BLOCK_MIGRATION_KEY, block_path):
                _log.info(f"Adding index for {block_path}")
                raw_block = storage.get_json_from_object(block_path)
                block: "model.BlockModel"
                if LEVEL == "1":
                    block = l1_block_model.new_from_stripped_block(raw_block)
                elif LEVEL == "2":
                    block = l2_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "3":
                    block = l3_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "4":
                    block = l4_block_model.new_from_at_rest(raw_block)
                elif LEVEL == "5":
                    block = l5_block_model.new_from_at_rest(raw_block)
                else:
                    raise RuntimeError(f"Unsupported node level {LEVEL}")
                put_document(Indexes.block.value, block.block_id, block.export_as_search_index())
                client.redis.sadd(BLOCK_MIGRATION_KEY, block_path)
            else:
                _log.info(f"Skipping already indexed block {block_path}")
Example #29
def _level_records(block_id: str, level: int) -> List[Any]:
    return [
        storage.get_json_from_object(key)
        for key in storage.list_objects(f"BLOCK/{block_id}-l{level}")
    ]

def list_registered_transaction_types() -> List[Dict[str, Any]]:
    return [
        storage.get_json_from_object(txn_type)
        for txn_type in storage.list_objects(f"{FOLDER}/")
    ]