async def send_notification_verification(
    session: aiohttp.ClientSession, url: str, verification_bytes: bytes, signature: str, redis_list_value: str
) -> None:
    """Send a notification verification to a preconfigured address

    This is the actual async broadcast of a single notification at its most atomic

    Args:
        session: aiohttp session for http requests
        url: The url to which bytes should be POSTed
        verification_bytes: the verification object read from disk as bytes
        signature: The signature of the bytes, signed by this dragonchain
        redis_list_value: the key within a redis set which is removed after the http request is attempted
    Returns:
        None
    """
    _log.debug(f"Notification -> {url}")
    try:
        resp = await session.post(
            url=url,
            data=verification_bytes,
            headers={"Content-Type": "application/json", "dragonchainId": keys.get_public_id(), "signature": signature},
            timeout=HTTP_REQUEST_TIMEOUT,
        )
        _log.debug(f"Notification <- {resp.status} {url}")
    except Exception:
        # Best-effort broadcast: log the failure and fall through so the pending item is still cleaned up
        _log.exception("Unable to send verification notification.")
    # Remove the pending notification from redis whether or not the POST succeeded
    await broadcast_functions.remove_notification_verification_for_broadcast_async(redis_list_value)
def register_new_interchain_key_with_remote(interchain_dcid: str) -> str:
    """Make a new auth key and register it with a remote dragonchain for inter-level communication

    Args:
        interchain_dcid: chain id of the interchain sharing this key
    Returns:
        auth key string of the newly shared key
    Raises:
        RuntimeError when bad response from chain or couldn't save to storage
    """
    # We need to establish a shared HMAC key for this chain before we can post
    auth_key = gen_auth_key()
    # Sign "<dcid>_<key>" so the remote chain can verify this key really came from us
    signature = keys.get_my_keys().make_signature(f"{interchain_dcid}_{auth_key}".encode("utf-8"), crypto.SupportedHashes.sha256)
    new_key = {"dcid": keys.get_public_id(), "key": auth_key, "signature": signature}
    try:
        r = requests.post(f"{matchmaking.get_dragonchain_address(interchain_dcid)}/v1/interchain-auth-register", json=new_key, timeout=30)
    except Exception as e:
        # Chain the cause so the underlying network failure stays visible in tracebacks
        raise RuntimeError(f"Unable to register shared auth key with dragonchain {interchain_dcid}\nError: {e}") from e
    if r.status_code < 200 or r.status_code >= 300:
        raise RuntimeError(f"Unable to register shared auth key with dragonchain {interchain_dcid}\nStatus code: {r.status_code}")
    if not save_interchain_auth_key(interchain_dcid, auth_key):
        raise RuntimeError("Unable to add new interchain auth key to storage")
    return auth_key
def create_block(
    l1_headers: "L1Headers",
    ddss: Union[str, float, int],
    valid_block_count: int,
    regions: List[str],
    clouds: List[str],
    l2_blocks: Iterable["l2_block_model.L2BlockModel"],
) -> l3_block_model.L3BlockModel:
    """Create and sign a new L3 block summarizing the given L2 blocks for one L1 block"""
    new_block_id, previous_proof = get_next_block_info()
    # Condense each validated L2 block down to its identifying proof record
    l2_proofs = [{"dc_id": b.dc_id, "block_id": b.block_id, "proof": b.proof} for b in l2_blocks]
    block = l3_block_model.L3BlockModel(
        dc_id=keys.get_public_id(),
        current_ddss=party.get_address_ddss(ADDRESS),  # DDSS comes from party and is cached hourly
        block_id=str(new_block_id),
        timestamp=str(math.floor(time.time())),
        prev_proof=previous_proof,
        scheme=PROOF_SCHEME,
        l1_dc_id=l1_headers["dc_id"],
        l1_block_id=l1_headers["block_id"],
        l1_proof=l1_headers["proof"],
        l2_proofs=l2_proofs,
        ddss=str(ddss),
        l2_count=str(valid_block_count),
        regions=regions,
        clouds=clouds,
    )
    sign_block(block)
    return block
def register_new_key_with_matchmaking() -> str:
    """Make a new auth key and register it with matchmaking

    Returns:
        auth key string of the newly shared key
    Raises:
        RuntimeError when bad response from chain or couldn't save to storage
    """
    auth_key = api_key_model.gen_auth_key()
    # Sign "matchmaking_<key>" so matchmaking can verify this key really came from us
    signature = keys.get_my_keys().make_signature(f"matchmaking_{auth_key}".encode("utf-8"), crypto.SupportedHashes.sha256)
    new_key = {"dcid": keys.get_public_id(), "key": auth_key, "signature": signature}
    try:
        r = requests.post(f"{matchmaking.MATCHMAKING_ADDRESS}/auth-register", json=new_key, timeout=30)
    except Exception as e:
        # Chain the cause so the underlying network failure stays visible in tracebacks
        raise RuntimeError(f"Unable to register shared auth key with matchmaking\nError: {e}") from e
    if r.status_code < 200 or r.status_code >= 300:
        raise RuntimeError(f"Unable to register shared auth key with matchmaking\nStatus code: {r.status_code}")
    if not save_matchmaking_auth_key(auth_key):
        # This saves a matchmaking key, not an interchain key (previous message was a copy-paste error)
        raise RuntimeError("Unable to add new matchmaking auth key to storage")
    return auth_key
def new_from_full_transactions(
    full_transactions_array: List[transaction_model.TransactionModel], block_id: str, prev_id: str, prev_proof: str
) -> "L1BlockModel":
    """Used in creating new blocks

    Args:
        full_transactions_array: list of TransactionModels to include in this block
        block_id: id of the new block, stamped onto every transaction
        prev_id: id of the previous block
        prev_proof: proof of the previous block
    Returns:
        L1BlockModel containing the provided transactions
    Raises:
        TypeError when the input is not a list of TransactionModels
    """
    # No schema exists for a list of full transactions, so validate the types by hand
    if not isinstance(full_transactions_array, list):
        raise TypeError("Invalid input types to create new block model.")
    # Stamp the new block's id onto each transaction as it is validated
    for txn in full_transactions_array:
        if not isinstance(txn, transaction_model.TransactionModel):
            raise TypeError("Invalid input types to create new block model.")
        txn.block_id = block_id
    return L1BlockModel(
        dc_id=keys.get_public_id(),
        block_id=block_id,
        timestamp=str(math.floor(time.time())),
        prev_proof=prev_proof,
        prev_id=prev_id,
        transactions=full_transactions_array,
    )
def populate_env(self) -> None:
    """Populate environment variables for the job

    Fills the job model's env mapping with the chain-level constants and this
    contract's id/name so the contract container can identify its host chain.
    """
    for name, value in (
        ("STAGE", STAGE),
        ("INTERNAL_ID", INTERNAL_ID),
        ("DRAGONCHAIN_ID", keys.get_public_id()),
        ("DRAGONCHAIN_ENDPOINT", DRAGONCHAIN_ENDPOINT),
        ("SMART_CONTRACT_ID", self.model.id),
        ("SMART_CONTRACT_NAME", self.model.txn_type),
    ):
        self.model.env[name] = value
def get_associated_l1_block_id(self) -> set:
    """Interface function for compatibility

    Returns:
        Set of L1 block ids found in this block's L4 entries that belong to this chain
    """
    # Each entry in self.l4_blocks is a JSON string; keep only the L1 block ids recorded for this chain
    return {
        record["l1_block_id"]
        for record in (json.loads(raw) for raw in self.l4_blocks)
        if record["l1_dc_id"] == keys.get_public_id()
    }
def _generate_transaction_model(transaction: Dict[str, Any]) -> transaction_model.TransactionModel:
    """Build a TransactionModel from user input, stamping on this chain's id, a fresh
    transaction id (uuid4), and the current epoch-second timestamp.

    Args:
        transaction: user-supplied transaction dictionary
    Returns:
        Fully-stamped transaction_model.TransactionModel instance
    """
    model = transaction_model.new_from_user_input(transaction)
    model.dc_id = keys.get_public_id()
    model.txn_id = str(uuid.uuid4())
    model.timestamp = str(math.floor(time.time()))
    return model
def generate_authenticated_request(
    http_verb: str, dcid: str, full_path: str, json_content: dict = None, hmac_hash_type: str = "SHA256"
) -> Tuple[dict, bytes]:
    """Generate request data (headers and body) for making authenticated http requests to other dragonchains or matchmaking

    Args:
        http_verb: string of the http verb that will be used for this request (i.e. GET, POST, etc)
        dcid: the dragonchain id to make this request for. If this is for matchmaking, specify the string 'matchmaking' instead
        full_path: full path of the request after the FQDN (including any query parameters) (i.e. /matchmaking/2?qty=3)
        json_content: dictionary object to use as the json body of the request (only include if request has a body)
        hmac_hash_type: the hmac hash type to use for this request
    Returns:
        Tuple where index 0 is the headers dictionary to use, and index 1 is the byte data (body) to use for an http request
    """
    if json_content is None:
        json_content = {}
    auth_key = None
    matchmaking = dcid == "matchmaking"
    http_verb = http_verb.upper()
    # First check if we already have a shared HMAC key for sending to this endpoint
    if matchmaking:
        auth_key = get_matchmaking_key()
        if auth_key is None:
            # We need to establish a shared HMAC key with matchmaking before we can make a request
            auth_key = register_new_key_with_matchmaking()
    else:
        try:
            auth_key = api_key_dao.get_api_key(dcid, interchain=True).key
        except exceptions.NotFound:
            # We need to establish a shared HMAC key for this chain before we can make a request
            # Note: register_new_interchain_key_with_remote returns the auth key string itself,
            # so no ".key" attribute access (that would raise AttributeError on a str)
            auth_key = register_new_interchain_key_with_remote(dcid)
    timestamp = get_now_datetime().isoformat() + "Z"
    content_type = ""
    content = b""
    if json_content:
        content_type = "application/json"
        content = json.dumps(json_content, separators=(",", ":")).encode("utf-8")
    headers = {
        "timestamp": timestamp,
        "Authorization": get_authorization(
            keys.get_public_id(), auth_key, http_verb, full_path, dcid, timestamp, content_type, content, hmac_hash_type
        ),
    }
    # Only add dragonchain header for inter-dragonchain communication
    if not matchmaking:
        headers["dragonchain"] = dcid
    # Only add content type header if it exists
    if content_type:
        headers["Content-Type"] = content_type
    return headers, content
def export_broadcast_dto(l3_blocks: List[Dict[str, Any]], l1_block: Dict[str, Any]) -> Dict[str, Any]:
    """Build the version-1 broadcast DTO pairing this L1 block's header with its L3 verification blocks

    Args:
        l3_blocks: list of L3 block dictionaries to broadcast
        l1_block: L1 block dictionary whose header/proof are referenced
    Returns:
        Broadcast DTO dictionary
    """
    header = {
        "dc_id": keys.get_public_id(),
        "block_id": l1_block["header"]["block_id"],
        "stripped_proof": l1_block["proof"]["proof"],
    }
    return {"version": "1", "payload": {"header": header, "l3-blocks": l3_blocks}}
def create_l5_block(block_id: str) -> l5_block_model.L5BlockModel:
    """Creates unfinalized L5 block that needs confirmation

    Args:
        block_id: id for the new (unfinalized) L5 block
    Returns:
        l5_block_model.L5BlockModel with an empty prev_proof, awaiting confirmation
    """
    return l5_block_model.L5BlockModel(
        dc_id=keys.get_public_id(),
        current_ddss=party.get_address_ddss(ADDRESS),  # DDSS comes from party and is cached hourly
        block_id=str(block_id),
        timestamp=str(math.floor(time.time())),
        prev_proof="",
        scheme=PROOF_SCHEME,
        l4_blocks=get_pending_l4_blocks(block_id),
    )
def ledger_contract_action(action: str, txn_type: str, entrypoint: str, image_digest: str) -> None:
    """Ledgers contract data when submit contract is a success

    Args:
        action: Which action to perform when ledgering
        txn_type: Transaction type to post to the chain
        entrypoint: Contract entrypoint to record in the payload
        image_digest: Docker image SHA-256 to use in ledgering
    """
    ledger_payload = {"action": action, "txn_type": txn_type, "contract_entrypoint": entrypoint, "image_digest": image_digest}
    ledger_txn = transaction_model.TransactionModel(
        txn_type=namespace.Namespaces.Contract.value,
        dc_id=keys.get_public_id(),
        txn_id=str(uuid.uuid4()),
        tag=f"contract:{txn_type}",
        timestamp=str(int(time.time())),
        payload=ledger_payload,
    )
    # Enqueue onto the incoming transaction queue so it gets included in a block
    queue.enqueue_generic(ledger_txn.export_as_queue_task(), queue=queue.INCOMING_TX_KEY, deadline=0)
def get_v1_status() -> Dict[str, Any]:
    """Assemble this chain's v1 status DTO from its matchmaking configuration

    Returns:
        Status dictionary; level 5 chains include extra funding/network fields
    """
    config = matchmaking.get_matchmaking_config()
    response: Dict[str, Any] = {
        "id": str(keys.get_public_id()),
        "level": int(config["level"]),
        "url": str(config["url"]),
        "hashAlgo": str(config["hashAlgo"]),
        "scheme": str(config["scheme"]),
        "version": str(config["version"]),
        "encryptionAlgo": str(config["encryptionAlgo"]),
    }
    # Return extra data if level 5
    if os.environ["LEVEL"] == "5":
        response.update(
            {
                "funded": bool(config["funded"]),
                "network": str(config["network"]),
                "broadcastInterval": float(config["broadcastInterval"]),
                "interchainWallet": str(config["interchainWallet"]),
            }
        )
    return response
def create_block(l1_headers: "L1Headers", validations: List[Dict[str, Any]]) -> l4_block_model.L4BlockModel:
    """Create and sign a new L4 block over the given validations for the referenced L1 block"""
    new_block_id, previous_proof = get_next_block_info()
    block = l4_block_model.L4BlockModel(
        dc_id=keys.get_public_id(),
        current_ddss=party.get_address_ddss(ADDRESS),  # DDSS comes from party and is cached hourly
        block_id=str(new_block_id),
        timestamp=str(math.floor(time.time())),
        prev_proof=previous_proof,
        scheme=PROOF_SCHEME,
        l1_dc_id=l1_headers["dc_id"],
        l1_block_id=l1_headers["block_id"],
        l1_proof=l1_headers["proof"],
        validations=validations,
    )
    sign_block(block)
    return block
def submit_invocation_request(self) -> None:
    """Submit this model as an invocation request to the queue to be handled by the contract invoker"""
    sc_model = smart_contract_model.SmartContractModel(txn_type=self.txn_type, sc_id=self.id, execution_order=self.execution_order)
    # Synthetic "cron" transaction with an empty payload triggers the scheduled invocation
    trigger_txn = transaction_model.TransactionModel(
        txn_type=self.txn_type, dc_id=keys.get_public_id(), txn_id="cron", timestamp=str(int(time.time())), payload={}
    )
    invoke_request = sc_model.export_as_invoke_request(invoke_transaction=trigger_txn.export_as_queue_task(dict_payload=True))
    _log.info(f"Sending invocation request for txn_type: {self.txn_type} contract_id: {self.id}")
    queue.enqueue_generic(content=invoke_request, queue=queue.CONTRACT_INVOKE_MQ_KEY, deadline=0)
def create_block(l1_block: "l1_block_model.L1BlockModel", transaction_map: Dict[str, Any]) -> l2_block_model.L2BlockModel:
    """Create and sign a new L2 block holding transaction validations for the given L1 block"""
    new_block_id, previous_proof = get_next_block_info()
    block = l2_block_model.L2BlockModel(
        dc_id=keys.get_public_id(),
        current_ddss=party.get_address_ddss(ADDRESS),  # DDSS comes from party and is cached hourly
        block_id=str(new_block_id),
        timestamp=str(math.floor(time.time())),
        prev_proof=previous_proof,
        scheme=PROOF_SCHEME,
        l1_dc_id=l1_block.dc_id,
        l1_block_id=l1_block.block_id,
        l1_proof=l1_block.proof,
        validations_dict=transaction_map,
    )
    sign_block(block)
    return block
def verify_request_authorization(  # noqa: C901
    authorization: str,
    http_verb: str,
    full_path: str,
    dcid: str,
    timestamp: str,
    content_type: str,
    content: bytes,
    interchain: bool,
    api_resource: str,
    api_operation: str,
    api_name: str,
) -> api_key_model.APIKeyModel:
    """Verify an http request to the webserver

    Args:
        authorization: Authorization header of the request
        http_verb: HTTP Verb of the request (i.e. GET, POST, etc)
        full_path: full path of the request after the FQDN (including any query parameters) (i.e. /chains/transaction)
        dcid: dragonchain header of the request
        timestamp: timestamp header of the request
        content_type: content-type header of the request (if it exists)
        content: byte object of the body of the request (if it exists)
        interchain: boolean whether to use interchain keys to check or not
        api_resource: the api resource name of this endpoint
        api_operation: the CRUD api operation of this endpoint ("create", "read", "update", "delete")
        api_name: the api name of this particular endpoint
    Raises:
        exceptions.UnauthorizedException (with message) when the authorization is not valid
        exceptions.ActionForbidden (with message) when the authorization is valid, but the action is not allowed
        exceptions.APIRateLimitException (with message) when rate limit is currently exceeded for the provided api key id
    Returns:
        The api key model used for this request (if successfully authenticated)
    """
    # A request addressed to a different chain id is rejected outright
    if dcid != keys.get_public_id():
        raise exceptions.UnauthorizedException("Incorrect Dragonchain ID")
    try:
        # Note, noqa for typing on re.searches are because we explicitly catch the exceptions and handle below
        # Expected header shape: "DC<version>-HMAC-<hash_type> <key_id>:<base64 hmac>"
        version = re.search("^DC(.*)-HMAC", authorization).group(1)  # noqa: T484
        if version == "1":
            hash_type = re.search("HMAC-(.*) ", authorization).group(1)  # noqa: T484
            try:
                supported_hash = get_supported_hmac_hash(hash_type)
            except ValueError:
                raise exceptions.UnauthorizedException("Unsupported HMAC Hash Type")
            # Make sure clock drift isn't too far to prevent replays
            now = get_now_datetime()
            request_time = None
            # Tolerate given timestamps both with/without decimals of a second
            if "." in timestamp:
                request_time = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%fZ")
            else:
                request_time = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%SZ")
            delta = datetime.timedelta(seconds=TIMEOUT_SEC)
            # Allow all requests within +/- TIMEOUT_SEC seconds of the chain's curent time
            if now + delta < request_time or now - delta > request_time:
                raise exceptions.UnauthorizedException("Timestamp of request too skewed")
            # The base64 hmac is everything after the last ":" in the header
            hmac_index = authorization.rfind(":")
            if hmac_index == -1:
                raise exceptions.UnauthorizedException("Malformed Authorization Header")
            hmac = base64.b64decode(authorization[hmac_index + 1:])
            message_string = get_hmac_message_string(http_verb, full_path, dcid, timestamp, content_type, content, supported_hash)
            try:
                # The key id sits between the space and the ":" of the header
                auth_key_id = re.search(" (.*):", authorization).group(1)  # noqa: T484
                try:
                    auth_key = api_key_dao.get_api_key(auth_key_id, interchain)
                except exceptions.NotFound:
                    _log.info(f"Authorization failure from key that does not exist {auth_key_id}")
                    raise exceptions.UnauthorizedException("Invalid HMAC Authentication")
                # Check if this key should be rate limited (does not apply to interchain keys)
                if not interchain and should_rate_limit(auth_key_id):
                    raise exceptions.APIRateLimitException(f"API Rate Limit Exceeded. {RATE_LIMIT} requests allowed per minute.")
                if crypto.compare_hmac(supported_hash, hmac, auth_key.key, message_string):
                    # Check if this signature has already been used for replay protection
                    if signature_is_replay(f"{auth_key_id}:{base64.b64encode(hmac).decode('ascii')}"):
                        raise exceptions.UnauthorizedException("Previous matching request found (no replays allowed)")
                    # Check that this key is allowed to perform this action
                    try:
                        if auth_key.is_key_allowed(api_resource, api_operation, api_name, interchain):
                            # Signature is valid and key is allowed; Return the api key used on success
                            return auth_key
                    except Exception:
                        # Permission-check failure falls through to ActionForbidden below
                        _log.exception("Uncaught exception checking if api key is allowed")
                    raise exceptions.ActionForbidden(f"This key is not allowed to perform {api_name}")
                else:
                    # HMAC doesn't match
                    raise exceptions.UnauthorizedException("Invalid HMAC Authentication")
            except exceptions.DragonchainException:
                # Let our own (intentional) exceptions propagate unchanged
                raise
            except Exception:
                raise exceptions.UnauthorizedException("Invalid HMAC Format")
        else:
            raise exceptions.UnauthorizedException("Unsupported DC Authorization Version")
    except exceptions.DragonchainException:
        raise
    except Exception:
        # Any unexpected parsing error means the header was not well formed
        raise exceptions.UnauthorizedException("Malformed Authorization Header")
from dragonchain.lib import authorization from dragonchain.lib import error_reporter from dragonchain.lib import keys from dragonchain.lib.interfaces import storage from dragonchain.lib.interfaces import registry as registry_interface # Alternate naming to alleviate confusion from dragonchain.lib.database import elasticsearch from dragonchain import exceptions from dragonchain import logger EVENT = os.environ["EVENT"] STAGE = os.environ["STAGE"] IAM_ROLE = os.environ["IAM_ROLE"] FAAS_GATEWAY = os.environ["FAAS_GATEWAY"] FAAS_REGISTRY = os.environ["FAAS_REGISTRY"] INTERNAL_ID = os.environ["INTERNAL_ID"] DRAGONCHAIN_ID = keys.get_public_id() DRAGONCHAIN_ENDPOINT = os.environ["DRAGONCHAIN_ENDPOINT"] _log = logger.get_logger() def main() -> "ContractJob": try: job = ContractJob(task_definition=json.loads(EVENT)) except Exception: raise exceptions.ContractException("Uncaught error in contract job") try: if job.update_model is not None: job.update() change_to_read_user()