def test_token_transfer(client, base_ddo_url, events_object):
    """Transferring the asset NFT should update the indexed owner."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    send_create_update_tx("create", asset, bytes([2]), test_account1)
    events_object.process_current_blocks()

    initial_ddo = get_ddo(client, base_ddo_url, did)
    assert initial_ddo["id"] == did
    assert initial_ddo["nft"]["owner"] == test_account1.address

    nft_contract = web3.eth.contract(
        abi=ERC721Template.abi,
        address=web3.toChecksumAddress(initial_ddo["nftAddress"]),
    )
    web3.eth.default_account = test_account1.address
    # Hand token id 1 over to the second account and wait for inclusion.
    txn_hash = nft_contract.functions.safeTransferFrom(
        test_account1.address, test_account2.address, 1
    ).transact()
    web3.eth.wait_for_transaction_receipt(txn_hash)
    events_object.process_current_blocks()

    updated_ddo = get_ddo(client, base_ddo_url, did)
    assert updated_ddo["id"] == did
    assert updated_ddo["nft"]["owner"] == test_account2.address
def test_token_uri_update(client, base_ddo_url, events_object):
    """Changing the NFT token URI should be reflected in the indexed asset."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    send_create_update_tx("create", asset, bytes([2]), test_account1)
    events_object.process_current_blocks()

    initial_ddo = get_ddo(client, base_ddo_url, did)
    assert initial_ddo["id"] == did
    assert initial_ddo["nft"]["tokenURI"] == "http://oceanprotocol.com/nft"

    nft_contract = web3.eth.contract(
        abi=ERC721Template.abi,
        address=web3.toChecksumAddress(initial_ddo["nftAddress"]),
    )
    web3.eth.default_account = test_account1.address
    # Point token id 1 at a new URI and wait for the tx to be mined.
    txn_hash = nft_contract.functions.setTokenURI(
        1, "http://something-else.com"
    ).transact()
    web3.eth.wait_for_transaction_receipt(txn_hash)
    events_object.process_current_blocks()

    updated_ddo = get_ddo(client, base_ddo_url, did)
    assert updated_ddo["id"] == did
    assert updated_ddo["nft"]["tokenURI"] == "http://something-else.com"
def publish_ddo(client, base_ddo_url, events_object):
    """Helper: publish a fresh DDO on-chain, index it, and return its DID."""
    asset = new_ddo(test_account1, get_web3(), "dt.0")
    send_create_update_tx("create", asset, bytes([0]), test_account1)
    events_object.process_current_blocks()
    return asset.id
def test_resolveByDtAddress(client_with_no_data, query_url, events_object):
    """An asset must be searchable by its NFT (datatoken) address."""
    client = client_with_no_data
    current_block = get_web3().eth.block_number
    template = json_dict.copy()
    ddo = new_ddo(test_account1, get_web3(), f"dt.{current_block}", template)
    did = ddo["id"]
    nft_address = ddo["nftAddress"]
    send_create_update_tx("create", ddo, bytes([1]), test_account1)
    events_object.process_current_blocks()

    # The Elasticsearch query keyed on the NFT address must return a hit.
    result = run_request(
        client.post,
        query_url,
        {
            "query": {
                "query_string": {"query": nft_address, "default_field": "nft.address"}
            }
        },
    ).json
    assert len(result["hits"]["hits"]) > 0

    # The metadata endpoint for the DID must answer with JSON and 200.
    base_url = BaseURLs.BASE_AQUARIUS_URL + "/assets"
    response = client.get(
        base_url + f"/metadata/{did}", content_type="application/json"
    )
    assert response.headers["Content-Type"] == "application/json"
    assert response.status_code == 200
def test_publish_unallowed_address(client, base_ddo_url, events_object):
    """Assets published from a non-allowed address must not be indexed."""
    asset = new_ddo(test_account3, get_web3(), "dt.0")
    did = asset.id
    send_create_update_tx("create", asset, bytes([2]), test_account3)
    events_object.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["error"] == f"Asset DID {did} not found in Elasticsearch."
def test_publish(client, base_ddo_url, events_object):
    """A freshly published asset is retrievable and carries the right chain id."""
    asset = new_ddo(test_account1, get_web3(), "dt.0")
    did = asset.id
    send_create_update_tx("create", asset, bytes([2]), test_account1)
    events_object.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["chainId"] == get_web3().eth.chain_id
def test_publish_unallowed_address(client, base_ddo_url, events_object):
    """Legacy-API variant: publishing from a non-allowed address yields no asset.

    NOTE(review): this redefines ``test_publish_unallowed_address`` and shadows
    an earlier definition in this module — only one of the two will actually
    run under pytest; consider renaming.
    """
    _ddo = new_ddo(test_account3, get_web3(), "dt.0")  # was f'dt.0': no placeholder
    did = _ddo.id
    ddo_string = json.dumps(dict(_ddo))
    data = Web3.toBytes(text=ddo_string)
    send_create_update_tx("create", did, bytes([0]), data, test_account3)
    events_object.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    # PEP 8 (E711): compare to None with identity, not equality.
    assert published_ddo is None
def test_dispenser_created(events_object, client, base_ddo_url):
    """Creating a dispenser for the datatoken should zero out the indexed price."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    _, dt_contract, erc20_address = send_create_update_tx(
        "create", asset, bytes([2]), test_account1
    )
    events_object.process_current_blocks()

    token_contract = web3.eth.contract(
        abi=ERC20Template.abi, address=web3.toChecksumAddress(erc20_address)
    )
    with open(get_address_file()) as f:
        address_json = json.load(f)
    dispenser_address = address_json["development"]["Dispenser"]

    # A free dispenser at a 1:1 rate, open to everyone.
    tx = token_contract.functions.createDispenser(
        web3.toChecksumAddress(dispenser_address),
        web3.toWei("1", "ether"),
        web3.toWei("1", "ether"),
        True,
        "0x0000000000000000000000000000000000000000",
    ).transact({"from": test_account1.address})
    web3.eth.wait_for_transaction_receipt(tx)
    events_object.process_current_blocks()

    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["stats"]["price"] == {"value": 0.0}
def test_resolveByDtAddress(client_with_no_data, base_ddo_url, events_object):
    """Legacy-API variant: assets are queryable by their dataToken address."""
    client = client_with_no_data
    current_block = get_web3().eth.blockNumber
    template = json_before.copy()
    ddo = new_ddo(test_account1, get_web3(), f"dt.{current_block}", template)
    # Legacy flow ships the DDO lzma-compressed (flags bit 1).
    payload = lzma.compress(Web3.toBytes(text=json.dumps(dict(ddo))))
    send_create_update_tx("create", ddo["id"], bytes([1]), payload, test_account1)
    get_event(EVENT_METADATA_CREATED, current_block, ddo["id"])
    events_object.process_current_blocks()

    results = run_request_get_data(
        client.post,
        base_ddo_url + "/query",
        {"query": {"dataToken": [template["dataToken"]]}},
    )["results"]
    assert len(results) > 0
def run_test(client, base_ddo_url, events_instance, flags=None, encryption_key=None):
    """Publish then update a DDO, optionally lzma-compressed and/or ecies-encrypted.

    A non-``None`` ``flags`` triggers compression (and sets flag bit 1); a
    non-``None`` ``encryption_key`` triggers ecies encryption (flag bit 2).
    The same encoding pipeline is applied to both the create and update payloads.
    """

    def encode(payload):
        # Compress first, encrypt second — the order the indexer reverses.
        if flags is not None:
            payload = lzma.compress(payload)
        if encryption_key is not None:
            key = eth_keys.KeyAPI.PrivateKey(encryption_key)
            payload = ecies.encrypt(key.public_key.to_hex(), payload)
        return payload

    web3 = get_web3()
    block = web3.eth.blockNumber
    _ddo = new_ddo(test_account1, web3, f"dt.{block}")
    did = _ddo.id

    _flags = flags or 0
    if flags is not None:
        _flags |= 1  # mark bit 1: lzma-compressed
    if encryption_key is not None:
        _flags |= 2  # mark bit 2: ecies-encrypted

    data = encode(Web3.toBytes(text=json.dumps(dict(_ddo))))
    send_create_update_tx("create", did, bytes([_flags]), data, test_account1)
    get_event(EVENT_METADATA_CREATED, block, did)
    events_instance.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did

    _ddo["service"][0]["attributes"]["main"]["name"] = "Updated ddo by event"
    data = encode(Web3.toBytes(text=json.dumps(dict(_ddo))))
    send_create_update_tx("update", did, bytes([_flags]), data, test_account1)
    get_event(EVENT_METADATA_UPDATED, block, did)
    events_instance.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert (
        published_ddo["service"][0]["attributes"]["main"]["name"]
        == "Updated ddo by event"
    )
def test_process_block_range(client, base_ddo_url, events_object):
    """Range processing is a no-op when from > to and survives processor errors."""
    config_file = app.config["AQUARIUS_CONFIG_FILE"]
    monitor = EventsMonitor(setup_web3(config_file), config_file)
    # Not processing if the start block is past the end block.
    assert monitor.process_block_range(13, 10) is None

    asset = new_ddo(test_account1, get_web3(), "dt.0")
    send_create_update_tx("create", asset, bytes([2]), test_account1)
    with patch(
        "aquarius.events.events_monitor.MetadataCreatedProcessor.process"
    ) as mock:
        mock.side_effect = Exception("Boom!")
        assert events_object.process_current_blocks() is None

    send_create_update_tx("update", asset, bytes([2]), test_account1)
    with patch(
        "aquarius.events.events_monitor.MetadataUpdatedProcessor.process"
    ) as mock:
        mock.side_effect = Exception("Boom!")
        assert events_object.process_current_blocks() is None
def test_process_fallback(monkeypatch, client, base_ddo_url, events_object):
    """An update event for a missing asset falls back to MetadataCreatedProcessor."""
    config_file = app.config["AQUARIUS_CONFIG_FILE"]
    web3 = setup_web3(config_file)
    block = web3.eth.block_number
    _ddo = new_ddo(test_account1, web3, f"dt.{block}")
    did = _ddo.id
    send_create_update_tx("create", _ddo, bytes([2]), test_account1)
    events_object.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did

    # Remove the asset so the update event cannot find it in Elasticsearch.
    events_object._es_instance.delete(did)
    _ddo["metadata"]["name"] = "Updated ddo by event"
    # Bug fix: flags must be the single byte bytes([2]) (b"\x02"), not
    # bytes(2) == b"\x00\x00" — matching every other send_create_update_tx call.
    send_create_update_tx("update", _ddo, bytes([2]), test_account1)
    # Falls back on the MetadataCreatedProcessor,
    # since no es instance means read will throw an Exception.
    with patch(
        "aquarius.events.processors.MetadataCreatedProcessor.process"
    ) as mock:
        events_object.process_current_blocks()
        mock.assert_called()
def test_post_with_no_valid_ddo(client, base_ddo_url, events_object):
    """Publishing a DDO with invalid metadata should not index the asset."""
    block = get_web3().eth.block_number
    ddo = new_ddo(
        test_account1, get_web3(), f"dt.{block}", json_dict_no_valid_metadata
    )
    _ = send_create_update_tx("create", ddo, bytes([1]), test_account1)
    events_object.process_current_blocks()
    # NOTE(review): this broad except also swallows AssertionError, so the
    # assert below can never fail the test — consider narrowing the handler.
    try:
        published_ddo = get_ddo(client, base_ddo_url, ddo.id)
        assert not published_ddo, (
            "publish should fail, Aquarius validation "
            "should have failed and skipped the "
            f"{EventTypes.EVENT_METADATA_CREATED} event."
        )
    except Exception:
        pass
def test_query_metadata(client, base_ddo_url, events_object):
    """Paged full-text queries report consistent totals, pages and result counts."""
    dao = Dao(config_file=os.environ["CONFIG_FILE"])
    dao.delete_all()
    block = get_web3().eth.blockNumber

    assets = []
    txs = []
    for i in range(5):
        ddo = new_ddo(test_account1, get_web3(), f"dt.{i + block}", json_dict)
        assets.append(ddo)
        payload = lzma.compress(Web3.toBytes(text=json.dumps(dict(ddo.items()))))
        txs.append(
            send_create_update_tx("create", ddo.id, bytes([1]), payload, test_account1)
        )
    for ddo in assets:
        get_event(EVENT_METADATA_CREATED, block, ddo.id)
    events_object.process_current_blocks()

    num_assets = len(assets)
    offset = 2
    # ceil(num_assets / offset) without floats.
    expected_pages = int(num_assets / offset) + int(num_assets % offset > 0)

    # Page 1: a full page of `offset` results.
    response = run_request_get_data(
        client.get, base_ddo_url + f"/query?text=white&page=1&offset={offset}"
    )
    assert response["page"] == 1
    assert response["total_pages"] == expected_pages
    assert response["total_results"] == num_assets
    assert len(response["results"]) == offset

    # Page 3: the last, possibly partial, page.
    response = run_request_get_data(
        client.get, base_ddo_url + f"/query?text=white&page=3&offset={offset}"
    )
    assert response["page"] == 3
    assert response["total_pages"] == expected_pages
    assert response["total_results"] == num_assets
    assert len(response["results"]) == num_assets - (
        offset * (response["total_pages"] - 1)
    )

    # Page 4: past the end, so empty.
    response = run_request_get_data(
        client.get, base_ddo_url + f"/query?text=white&page=4&offset={offset}"
    )
    assert response["page"] == 4
    assert response["total_pages"] == expected_pages
    assert response["total_results"] == num_assets
    assert len(response["results"]) == 0
def test_post_with_no_valid_ddo(client, base_ddo_url, events_object):
    """Legacy-API variant: invalid metadata should not be indexed.

    Shadows an earlier definition of the same test name in this module.
    """
    block = get_web3().eth.blockNumber
    ddo = new_ddo(
        test_account1, get_web3(), f"dt.{block}", json_dict_no_valid_metadata
    )
    payload = lzma.compress(Web3.toBytes(text=json.dumps(dict(ddo.items()))))
    _receipt = send_create_update_tx(
        "create", ddo.id, bytes([1]), payload, test_account1
    )
    get_event(EVENT_METADATA_CREATED, block, ddo.id)
    events_object.process_current_blocks()
    # NOTE(review): this broad except also swallows AssertionError, so the
    # assert below can never fail the test — consider narrowing the handler.
    try:
        published_ddo = get_ddo(client, base_ddo_url, ddo.id)
        assert not published_ddo, (
            f"publish should fail, Aquarius validation "
            f"should have failed and skipped the {EVENT_METADATA_CREATED} event."
        )
    except Exception:
        pass
def run_test(client, base_ddo_url, events_instance, flags):
    """Publish then update an asset with the given flags byte; verify its services."""
    web3 = events_instance._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    _, _, erc20_address = send_create_update_tx(
        "create", asset, bytes([flags]), test_account1
    )
    events_instance.process_current_blocks()

    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    # Every service must point at the freshly deployed datatoken.
    for service in published_ddo["services"]:
        assert service["datatokenAddress"] == erc20_address
        assert service["name"] in ["dataAssetAccess", "dataAssetComputingService"]

    asset["metadata"]["name"] = "Updated ddo by event"
    send_create_update_tx("update", asset, bytes([flags]), test_account1)
    events_instance.process_current_blocks()
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["metadata"]["name"] == "Updated ddo by event"
def add_assets(_events_object, name, total=5):
    """Helper: publish `total` assets named `{name}.{block+i}` and index them."""
    block = get_web3().eth.block_number
    assets = []
    txs = []
    for i in range(total):
        ddo = new_ddo(test_account1, get_web3(), f"{name}.{i + block}", json_dict)
        assets.append(ddo)
        txs.append(
            send_create_update_tx("create", ddo, bytes([1]), test_account1)[0]
        )

    # Rewind the cursor to the first publish tx, then process once per asset.
    _events_object.store_last_processed_block(txs[0].blockNumber)
    for _ in assets:
        _events_object.process_current_blocks()
    return assets
def test_metadata_state_update(client, base_ddo_url, events_object):
    """MetadataState transitions drive soft-delete, recreation and reactivation."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    send_create_update_tx("create", asset, bytes([2]), test_account1)
    events_object.process_current_blocks()
    initial_ddo = get_ddo(client, base_ddo_url, did)
    assert initial_ddo["id"] == did

    # MetadataState updated to other than active should soft delete the ddo
    # from Elasticsearch: only the custom Aquarius fields survive.
    send_set_metadata_state_tx(
        ddo=asset, account=test_account1, state=MetadataStates.DEPRECATED
    )
    events_object.process_current_blocks()
    time.sleep(30)
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert "id" not in published_ddo
    assert list(published_ddo.keys()) == AquariusCustomDDOFields.get_all_values()
    assert (
        published_ddo[AquariusCustomDDOFields.EVENT]["tx"]
        == initial_ddo[AquariusCustomDDOFields.EVENT]["tx"]
    )
    assert (
        published_ddo[AquariusCustomDDOFields.NFT]["state"]
        == MetadataStates.DEPRECATED
    )

    # Back to active: delegates to the MetadataCreated processor and recreates
    # the asset from the original creation event.
    send_set_metadata_state_tx(
        ddo=asset, account=test_account1, state=MetadataStates.ACTIVE
    )
    events_object.process_current_blocks()
    time.sleep(30)
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["event"]["tx"] == initial_ddo["event"]["tx"]
    assert (
        published_ddo[AquariusCustomDDOFields.NFT]["state"] == MetadataStates.ACTIVE
    )

    # Ordering disabled keeps the contents intact but flips the state.
    send_set_metadata_state_tx(
        ddo=asset, account=test_account1, state=MetadataStates.ORDERING_DISABLED
    )
    events_object.process_current_blocks()
    time.sleep(30)
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["event"]["tx"] == initial_ddo["event"]["tx"]
    assert (
        published_ddo[AquariusCustomDDOFields.NFT]["state"]
        == MetadataStates.ORDERING_DISABLED
    )

    # Reactivation restores the active state on the existing asset, keeping
    # the original creation event.
    send_set_metadata_state_tx(
        ddo=asset, account=test_account1, state=MetadataStates.ACTIVE
    )
    events_object.process_current_blocks()
    time.sleep(30)
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["event"]["tx"] == initial_ddo["event"]["tx"]
    assert (
        published_ddo[AquariusCustomDDOFields.NFT]["state"] == MetadataStates.ACTIVE
    )
def test_order_started(events_object, client, base_ddo_url):
    """A startOrder on the datatoken increments the indexed order count."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    _, dt_contract, erc20_address = send_create_update_tx(
        "create", asset, bytes([2]), test_account1
    )
    events_object.process_current_blocks()

    token_contract = web3.eth.contract(
        abi=ERC20Template.abi, address=web3.toChecksumAddress(erc20_address)
    )
    token_contract.functions.mint(
        web3.toChecksumAddress(test_account3.address), web3.toWei(10, "ether")
    ).transact({"from": test_account1.address})

    # Mock provider fees: a zero-amount fee signed by the aquarius wallet.
    provider_wallet = get_aquarius_wallet()
    provider_fee_amount = 0
    provider_data = json.dumps({"timeout": 0}, separators=(",", ":"))
    provider_fee_address = provider_wallet.address
    provider_fee_token = "0x0000000000000000000000000000000000000000"
    message_hash = Web3.solidityKeccak(
        ["bytes", "address", "address", "uint256", "uint256"],
        [
            Web3.toHex(Web3.toBytes(text=provider_data)),
            provider_fee_address,
            provider_fee_token,
            provider_fee_amount,
            0,
        ],
    )
    pk = keys.PrivateKey(provider_wallet.key)
    prefix = "\x19Ethereum Signed Message:\n32"
    signable_hash = Web3.solidityKeccak(
        ["bytes", "bytes"], [Web3.toBytes(text=prefix), Web3.toBytes(message_hash)]
    )
    signed = keys.ecdsa_sign(message_hash=signable_hash, private_key=pk)
    provider_fee = {
        "providerFeeAddress": web3.toChecksumAddress(provider_fee_address),
        "providerFeeToken": web3.toChecksumAddress(provider_fee_token),
        "providerFeeAmount": provider_fee_amount,
        "providerData": Web3.toHex(Web3.toBytes(text=provider_data)),
        # make it compatible with last openzepellin
        # https://github.com/OpenZeppelin/openzeppelin-contracts/pull/1622
        "v": (signed.v + 27) if signed.v <= 1 else signed.v,
        "r": Web3.toHex(Web3.toBytes(signed.r).rjust(32, b"\0")),
        "s": Web3.toHex(Web3.toBytes(signed.s).rjust(32, b"\0")),
        "validUntil": 0,
    }

    txn = token_contract.functions.startOrder(
        web3.toChecksumAddress(test_account3.address),
        1,
        (
            web3.toChecksumAddress(provider_fee["providerFeeAddress"]),
            web3.toChecksumAddress(provider_fee["providerFeeToken"]),
            provider_fee["providerFeeAmount"],
            provider_fee["v"],
            provider_fee["r"],
            provider_fee["s"],
            provider_fee["validUntil"],
            provider_fee["providerData"],
        ),
        (
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
            0,
        ),
    ).transact({"from": test_account3.address})
    web3.eth.wait_for_transaction_receipt(txn)
    events_object.process_current_blocks()

    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["stats"]["orders"] == 1
def test_exchange_created(events_object, client, base_ddo_url):
    """Fixed-rate exchange creation and rate updates drive the indexed price."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    _, dt_contract, erc20_address = send_create_update_tx(
        "create", asset, bytes([2]), test_account1
    )
    events_object.process_current_blocks()

    token_contract = web3.eth.contract(
        abi=ERC20Template.abi, address=web3.toChecksumAddress(erc20_address)
    )
    amount = web3.toWei("100000", "ether")
    rate = web3.toWei("1", "ether")
    with open(get_address_file()) as f:
        address_json = json.load(f)
    fre_address = address_json["development"]["FixedPrice"]

    token_contract.functions.mint(
        web3.toChecksumAddress(test_account3.address), amount
    ).transact({"from": test_account1.address})
    ocean_address = web3.toChecksumAddress(address_json["development"]["Ocean"])

    # Create a 1 OCEAN fixed-rate exchange for the datatoken.
    tx = token_contract.functions.createFixedRate(
        web3.toChecksumAddress(fre_address),
        [
            ocean_address,
            web3.toChecksumAddress(test_account1.address),
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
        ],
        [
            18,
            18,
            rate,
            0,
            0,
        ],
    ).transact({"from": test_account1.address})
    receipt = web3.eth.wait_for_transaction_receipt(tx)
    events_object.process_current_blocks()

    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["stats"]["price"] == {
        "tokenAddress": ocean_address,
        "tokenSymbol": "Ocean",
        "value": 1.0,
    }

    # Doubling the rate must be reflected on the next indexing pass.
    fre = get_fre(web3)
    rate = 2 * rate
    exchange_id = (
        fre.events.ExchangeCreated().processReceipt(receipt)[0].args.exchangeId
    )
    tx = fre.functions.setRate(exchange_id, rate).transact(
        {"from": test_account1.address}
    )
    receipt = web3.eth.wait_for_transaction_receipt(tx)
    events_object.process_current_blocks()

    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["stats"]["price"] == {
        "tokenAddress": ocean_address,
        "tokenSymbol": "Ocean",
        "value": 2.0,
    }
def test_trigger_caching(client, base_ddo_url, events_object):
    """The triggerCaching endpoint indexes assets straight from a tx hash."""
    web3 = events_object._web3  # get_web3()
    current_block = web3.eth.block_number
    asset = new_ddo(test_account1, web3, f"dt.{current_block}")
    did = asset.id
    txn_receipt, _, erc20_address = send_create_update_tx(
        "create", asset, bytes([2]), test_account1
    )
    tx_id = txn_receipt["transactionHash"].hex()

    # Elasticsearch failures surface as an error payload.
    with patch("aquarius.app.es_instance.ElasticsearchInstance.get") as mock:
        mock.side_effect = Exception("Boom!")
        response = run_request_get_data(
            client.post,
            "api/aquarius/assets/triggerCaching",
            {"transactionId": tx_id},
        )
        assert response["error"] == "Encountered error when triggering caching: Boom!."

    # A healthy call caches the asset and returns it.
    response = run_request_get_data(
        client.post, "api/aquarius/assets/triggerCaching", {"transactionId": tx_id}
    )
    assert response["id"] == did
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    for service in published_ddo["services"]:
        assert service["datatokenAddress"] == erc20_address
        assert service["name"] in ["dataAssetAccess", "dataAssetComputingService"]

    # Updates are also picked up through the endpoint.
    asset["metadata"]["name"] = "Updated ddo by event"
    txn_receipt, dt_contract, _ = send_create_update_tx(
        "update", asset, bytes([2]), test_account1
    )
    tx_id = txn_receipt["transactionHash"].hex()
    response = run_request_get_data(
        client.post, "api/aquarius/assets/triggerCaching", {"transactionId": tx_id}
    )
    published_ddo = get_ddo(client, base_ddo_url, did)
    assert published_ddo["id"] == did
    assert published_ddo["metadata"]["name"] == "Updated ddo by event"
    assert response["metadata"]["name"] == "Updated ddo by event"

    # Index out of range.
    response = run_request_get_data(
        client.post,
        "api/aquarius/assets/triggerCaching",
        {"transactionId": tx_id, "logIndex": 2},
    )
    assert response["error"] == "Log index 2 not found"

    # Can not find event created, nor event updated in an unrelated tx.
    txn_hash = dt_contract.functions.setTokenURI(
        1, "http://something-else.com"
    ).transact()
    txn_receipt = web3.eth.wait_for_transaction_receipt(txn_hash)
    tx_id = txn_receipt["transactionHash"].hex()
    response = run_request_get_data(
        client.post, "api/aquarius/assets/triggerCaching", {"transactionId": tx_id}
    )
    assert response["error"] == "No metadata created/updated event found in tx."