def __init__(self, event, web3, es_instance, chain_id):
    self.did = make_did(event.address, chain_id)
    self.es_instance = es_instance
    self.event = event
    self.web3 = web3

    try:
        self.asset = self.es_instance.read(self.did)
    except Exception:
        # The DDO may not be indexed yet (e.g. this is the first
        # MetadataCreated event for the address); treat that as None.
        self.asset = None
def new_ddo(account, web3, name, ddo=None):
    _ddo = ddo if ddo else ddo_event_sample_v4.copy()

    if "publicKey" not in _ddo or not _ddo["publicKey"]:
        _ddo["publicKey"] = [{"owner": ""}]

    _ddo["publicKey"][0]["owner"] = account.address
    _ddo["random"] = str(uuid.uuid4())

    dt_address = deploy_datatoken(web3, account, name, name)
    chain_id = web3.eth.chain_id
    _ddo["id"] = make_did(dt_address, chain_id)
    _ddo["chainId"] = chain_id
    _ddo["nftAddress"] = dt_address

    return AttributeDict(_ddo)
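# Illustrative usage sketch (not part of the original source): how new_ddo is
# typically exercised from a test. `web3` and `account` are assumed to be a
# connected web3.py instance and a funded test account; the helper name below
# is hypothetical.
def _example_publish_test_ddo(account, web3):
    ddo = new_ddo(account, web3, "DT1")
    # The DID is derived deterministically from the NFT address and chain id.
    assert ddo["id"] == make_did(ddo["nftAddress"], web3.eth.chain_id)
    assert ddo["publicKey"][0]["owner"] == account.address
    return ddo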
def __init__(self, token_address, es_instance, last_sync_block, chain_id):
    self.did = make_did(token_address, chain_id)
    self.chain_id = chain_id
    self.es_instance = es_instance
    self.token_address = token_address
    self.last_sync_block = last_sync_block

    try:
        self.asset = self.es_instance.read(self.did)
    except Exception:
        logger.debug(f"Asset {self.did} is missing from ES.")
        self.asset = None
def validate_dict(dict_orig, chain_id, nft_address):
    """Performs SHACL validation on a dict.

    Returns a tuple of (conforms, error messages)."""
    dictionary = copy.deepcopy(dict_orig)
    dictionary["@type"] = "DDO"

    extra_errors = {}
    if "@context" not in dict_orig or not isinstance(
        dict_orig["@context"], (list, dict)
    ):
        extra_errors["@context"] = "Context is missing or invalid."

    if "metadata" not in dict_orig:
        extra_errors["metadata"] = "Metadata is missing or invalid."

    for attr in ["created", "updated"]:
        if "metadata" not in dict_orig or attr not in dict_orig["metadata"]:
            continue

        if not is_iso_format(dict_orig["metadata"][attr]):
            extra_errors["metadata"] = attr + " is not in ISO format."

    if not chain_id:
        extra_errors["chainId"] = "chainId is missing or invalid."

    if not nft_address:
        extra_errors["nftAddress"] = "nftAddress is missing or invalid."

    if make_did(nft_address, str(chain_id)) != dict_orig.get("id"):
        extra_errors["id"] = "did is not valid for the chainId and nftAddress."

    # The @context key is reserved in the JSON-LD format, so overwrite it
    # with a plain vocabulary before serializing for RDF parsing.
    dictionary["@context"] = {"@vocab": "http://schema.org/"}
    dictionary_as_string = json.dumps(dictionary)

    version = dictionary.get("version", CURRENT_VERSION)
    schema_file = get_schema(version)
    dataGraph = rdflib.Graph().parse(data=dictionary_as_string, format="json-ld")

    conforms, results_graph, _ = validate(dataGraph, shacl_graph=schema_file)
    errors = parse_report_to_errors(results_graph)

    if extra_errors:
        conforms = False
        errors.update(extra_errors)

    return conforms, errors
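# Illustrative usage sketch (not part of the original source): running
# validate_dict against an incoming DDO payload. The helper name is
# hypothetical; the SHACL schema files must be available for get_schema
# to resolve the version.
def _example_validate_incoming_ddo(ddo_dict):
    conforms, errors = validate_dict(
        ddo_dict, ddo_dict.get("chainId"), ddo_dict.get("nftAddress")
    )
    if not conforms:
        # errors maps field names to human-readable messages, e.g.
        # {"metadata": "Metadata is missing or invalid."}
        logger.error(f"DDO validation failed: {errors}")
    return conforms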
def process(self):
    self.did = make_did(self.event.address, self._chain_id)

    if self.event.args.state == MetadataStates.ACTIVE:
        return self.restore_ddo()

    target_state = self.event.args.state
    if target_state in [
        MetadataStates.END_OF_LIFE,
        MetadataStates.DEPRECATED,
        MetadataStates.REVOKED,
    ]:
        try:
            # Only soft-delete assets that actually exist in the index;
            # if the read fails there is nothing to delete.
            self._es_instance.read(self.did)
            self.soft_delete_ddo(self.did)
        except Exception:
            return

    self.update_aqua_nft_state_data(self.event.args.state, self.did)
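# For reference (assumed values, not confirmed by this source): the
# MetadataStates enum matched above follows the ERC721 template's metadata
# state convention, roughly:
#   ACTIVE = 0, END_OF_LIFE = 1, DEPRECATED = 2, REVOKED = 3
# The authoritative definition lives in the project's constants module.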
def trigger_caching():
    """Triggers manual caching of a specific transaction (MetadataCreated or MetadataUpdated event)
    ---
    tags:
      - name
    consumes:
      - application/json
    parameters:
      - name: transactionId
        required: true
        description: transaction id containing MetadataCreated or MetadataUpdated event
      - name: logIndex
        required: false
        description: zero-based index in log if transaction contains more events
    responses:
      200:
        description: successful operation.
      400:
        description: bad request. Log index not found or event not found.
      500:
        description: server error/exception
    """
    try:
        data = request.args if request.args else request.json
        tx_id = data.get("transactionId")
        log_index = int(data.get("logIndex", 0))

        config_file = app.config["AQUARIUS_CONFIG_FILE"]
        web3 = setup_web3(config_file)
        tx_receipt = web3.eth.wait_for_transaction_receipt(tx_id)

        if len(tx_receipt.logs) <= log_index or log_index < 0:
            return jsonify(error=f"Log index {log_index} not found"), 400

        dt_address = tx_receipt.logs[log_index].address
        dt_contract = web3.eth.contract(
            abi=ERC721Template.abi, address=web3.toChecksumAddress(dt_address)
        )
        created_event = dt_contract.events.MetadataCreated().processReceipt(
            tx_receipt, errors=DISCARD
        )
        updated_event = dt_contract.events.MetadataUpdated().processReceipt(
            tx_receipt, errors=DISCARD
        )

        if not created_event and not updated_event:
            return jsonify(error="No metadata created/updated event found in tx."), 400

        es_instance = ElasticsearchInstance(config_file)
        allowed_publishers = get_allowed_publishers()
        purgatory = (
            Purgatory(es_instance)
            if (os.getenv("ASSET_PURGATORY_URL") or os.getenv("ACCOUNT_PURGATORY_URL"))
            else None
        )

        chain_id = web3.eth.chain_id
        processor_args = [es_instance, web3, allowed_publishers, purgatory, chain_id]

        processor = (
            MetadataCreatedProcessor if created_event else MetadataUpdatedProcessor
        )
        event_to_process = created_event[0] if created_event else updated_event[0]
        event_processor = processor(
            *([event_to_process, dt_contract, tx_receipt["from"]] + processor_args)
        )
        event_processor.process()

        did = make_did(dt_address, chain_id)
        response = app.response_class(
            response=sanitize_record(es_instance.get(did)),
            status=200,
            mimetype="application/json",
        )

        return response
    except Exception as e:
        logger.error(f"trigger_caching failed: {str(e)}.")
        return (
            jsonify(error=f"Encountered error when triggering caching: {str(e)}."),
            500,
        )
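# Illustrative client sketch (not part of the original source): invoking the
# endpoint above over HTTP. The route path below is an assumption; check the
# blueprint registration for the actual prefix. Requires the `requests`
# package in the client environment.
def _example_trigger_caching(base_url, tx_id, log_index=0):
    import requests

    resp = requests.post(
        f"{base_url}/api/aquarius/assets/triggerCaching",  # hypothetical path
        json={"transactionId": tx_id, "logIndex": log_index},
    )
    resp.raise_for_status()
    # On success, the body is the cached DDO as sanitized by sanitize_record.
    return resp.json()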
def process(self):
    txid = self.txid
    dt_factory = get_dt_factory(self._web3, self._chain_id)

    event_address = self._web3.toChecksumAddress(self.event.address)
    if dt_factory.caller.erc721List(event_address) != event_address:
        logger.error("token not deployed by our factory")
        return

    if not check_metadata_proofs(self._web3, self.metadata_proofs):
        try:
            self._es_instance.delete(make_did(self.event.address, self._chain_id))
        except ValueError:
            pass

        return

    asset = decrypt_ddo(
        self._web3,
        self.event.args.decryptorUrl,
        self.event.address,
        self._chain_id,
        txid,
        self.event.args.metaDataHash,
    )

    if not asset:
        logger.error("Decrypt ddo failed")
        raise Exception("Decrypt ddo failed")

    self.did = asset["id"]
    did, sender_address = self.did, self.sender_address
    logger.info(
        f"Process new DDO, did from event log:{did}, block {self.block}, "
        f"contract: {self.event.address}, txid: {self.txid}, chainId: {self._chain_id}"
    )

    permission = self.check_permission(sender_address)
    if not permission:
        raise Exception("RBAC permission denied.")

    try:
        old_asset = self._es_instance.read(did)
    except Exception:
        # The asset is not indexed (it may never have been created, or it
        # was deleted/hidden for a violation); fall back to processing this
        # event as a MetadataCreated.
        logger.warning(f"{did} is not registered, will add it as a new DDO.")
        event_processor = MetadataCreatedProcessor(
            self.event,
            self.dt_contract,
            self.sender_address,
            self._es_instance,
            self._web3,
            self.allowed_publishers,
            self.purgatory,
            self._chain_id,
        )

        return event_processor.process()

    is_updateable = self.check_update(asset, old_asset, sender_address)
    if not is_updateable:
        return False

    _record = self.make_record(asset, old_asset)
    if _record:
        try:
            self._es_instance.update(json.dumps(_record), did)
            updated = _record["updated"]
            logger.info(f"updated DDO did={did}, updated: {updated}")
            return True
        except Exception as err:
            logger.error(
                f"encountered an error while updating the asset data to ES: {str(err)}"
            )

    return False