def test_produce_registry_events_from_blockchain(self, mock_get_current_block_no,
                                                 mock_last_block_number,
                                                 mock_get_contract_instance):
    """RegistryEventProducer should surface the OrganizationCreated event returned
    by the (mocked) registry contract filter.

    Fix: removed the unused local ``event_repository`` and deduplicated the
    event literal shared by the mock and the assertion.
    """
    registry_event_producer = RegistryEventProducer(infura_endpoint, Repository(NETWORKS))

    # Canned OrganizationCreated log entry served by the mocked event filter.
    org_created_event = AttributeDict({
        'args': AttributeDict({
            'orgId': b'snet\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'}),
        'event': 'OrganizationCreated',
        'logIndex': 1,
        'transactionIndex': 15,
        'transactionHash': HexBytes(
            '0x7934a42442792f6d5a171df218b66161021c885085187719c991ec58d7459821'),
        'address': '0x663422c6999Ff94933DBCb388623952CF2407F6f',
        'blockHash': HexBytes('0x1da77d63b7d57e0a667ffb9f6d23be92f3ffb5f4b27b39b86c5d75bb167d6779'),
        'blockNumber': 6243627})

    org_created_event_object = Mock()
    org_created_event_object.createFilter = Mock(
        return_value=Mock(get_all_entries=Mock(return_value=[org_created_event])))
    mock_get_contract_instance.return_value = Mock(
        events=Mock(organizationCreated=org_created_event_object,
                    abi=[{"type": "event", "name": "organizationCreated"}]))
    mock_last_block_number.return_value = 50
    mock_get_current_block_no.return_value = 50

    blockchain_events = registry_event_producer.produce_event(3)
    assert blockchain_events == [org_created_event]
def __init__(self, event_abi, formatter=None):
    """Store the event ABI and precompute topic and argument-filter metadata."""
    self.event_abi = event_abi
    self.formatter = formatter
    self.event_topic = initialize_event_topics(self.event_abi)
    self.args = AttributeDict(_build_argument_filters_from_event_abi(event_abi))
    # Argument order as declared in the ABI, used for positional matching.
    self._ordered_arg_names = tuple(arg['name'] for arg in event_abi['inputs'])
def make_transfer_event(
    transaction_hash: Hash32 = Hash32(
        int_to_big_endian(12345).rjust(32, b"\x00")),
    from_="0x345DeAd084E056dc78a0832E70B40C14B6323458",
    to="0x1ADb0A4853bf1D564BbAD7565b5D50b33D20af60",
    value=1,
) -> AttributeDict:
    """Build a Transfer event fixture whose fields can be overridden per test."""
    transfer_args = AttributeDict({
        "from": from_,
        "to": to,
        "value": value,
    })
    return AttributeDict({
        "event": TRANSFER_EVENT_NAME,
        "transactionHash": HexBytes(transaction_hash),
        "blockNumber": 1,
        "transactionIndex": 0,
        "logIndex": 0,
        "args": transfer_args,
    })
def get_user_orders(self, address, datatoken=None, service_id=None):
    """Collect Order records for `address`, optionally filtered by `service_id`."""
    dt = DataToken(datatoken)
    orders = []
    for log in dt.get_start_order_logs(
            self._web3, address, from_all_tokens=not bool(datatoken)):
        attrs = dict(log.args.items())
        attrs["amount"] = from_base_18(int(log.args.amount))
        attrs["marketFee"] = from_base_18(int(log.args.marketFee))
        attrs = AttributeDict(attrs.items())
        # Order fields: datatoken, amount, timestamp, transactionId, did,
        # payer, consumer, serviceId, serviceType
        order = Order(
            log.address,
            attrs.amount,
            attrs.timestamp,
            log.transactionHash,
            f"did:op:{remove_0x_prefix(log.address)}",
            attrs.payer,
            attrs.consumer,
            attrs.serviceId,
            None,
        )
        if service_id is None or order.serviceId == service_id:
            orders.append(order)
    return orders
def transfer_event():
    """An exemplary transfer event."""
    transfer_args = AttributeDict({
        "from": "0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf",
        "to": "0x2946259E0334f33A064106302415aD3391BeD384",
        "value": 1,
    })
    return AttributeDict({
        "args": transfer_args,
        "event": "Transfer",
        "logIndex": 5,
        "transactionIndex": 10,
        "transactionHash": HexBytes(
            "0x66ba278660204ddd43f350e9110a8339fd32a227354429744456aac63ff9ef6f"
        ),
        "address": "0xF2E246BB76DF876Cef8b38ae84130F4F55De395b",
        "blockHash": HexBytes(
            "0x0e9226f0b8eb7b1c0b1652b8c8ce81b1790927bdaa692223ec2fb746e21063f8"
        ),
        "blockNumber": 3,
    })
def get_user_orders(
    self,
    address: str,
    datatoken: Optional[str] = None,
    service_id: Optional[int] = None,
) -> List[Order]:
    """
    :return: List of orders `[Order]`
    """
    dt = DataToken(self.web3, datatoken)
    orders = []
    for log in dt.get_start_order_logs(address, from_all_tokens=not bool(datatoken)):
        attrs = dict(log.args.items())
        attrs["amount"] = int(log.args.amount)
        attrs["marketFee"] = int(log.args.marketFee)
        attrs = AttributeDict(attrs.items())
        # Order fields: datatoken, amount, timestamp, transactionId, did,
        # payer, consumer, serviceId, serviceType
        order = Order(
            log.address,
            attrs.amount,
            attrs.timestamp,
            log.transactionHash,
            f"did:op:{remove_0x_prefix(log.address)}",
            attrs.payer,
            attrs.consumer,
            attrs.serviceId,
            None,
        )
        if service_id is None or order.serviceId == service_id:
            orders.append(order)
    return orders
def response_to_attrdict(f, *args, **kwargs):
    """Call `f` and wrap its JSON response body in an AttributeDict."""
    resp = f(*args, **kwargs)
    if not resp.text:
        # Empty body: resp.json() would raise, so return an empty mapping.
        return AttributeDict({})
    # resp.json() parses the body (this removes closing dquotes).
    return AttributeDict(resp.json())
def block_values_to_hex(block: AttributeDict) -> AttributeDict:
    """Return a copy of `block` where every convertible value is wrapped in HexBytes."""
    converted: Dict = {}
    for key, value in block.items():
        try:
            converted[key] = HexBytes(value)
        except binascii.Error:
            # Not valid hex input; keep the original value untouched.
            converted[key] = value
    return AttributeDict(converted)
def __init__(
    self, event_abi: ABIEvent, abi_codec: ABICodec, formatter: EventData = None
) -> None:
    """Keep the ABI and codec, then precompute topic and argument-filter state."""
    self.event_abi = event_abi
    self.abi_codec = abi_codec
    self.formatter = formatter
    self.event_topic = initialize_event_topics(self.event_abi)
    self.args = AttributeDict(
        _build_argument_filters_from_event_abi(event_abi, abi_codec))
    # Argument order as declared in the ABI, used for positional matching.
    self._ordered_arg_names = tuple(arg['name'] for arg in event_abi['inputs'])
def get_order(bta, qta, gta, isell, acc):
    """Assemble, hash and sign a trade order for account `acc`.

    Amounts are scaled by MAGNITUDE; `isell` selects the config data side.
    """
    orderset = AttributeDict({
        'baseToken': btoken.address,
        'quoteToken': qtoken.address,
        'relayer': relayer.address,
    })
    data = ym.functions.getConfigData(isell).call()

    # Placeholder signature: only needed so the order hash can be computed.
    placeholder = HexBytes(
        '0x71ef3eed6242230a219d9dc7737cb5a3a16059708ee322e96b8c5774105b9b00')
    sig = AttributeDict({'config': placeholder, 'r': placeholder, 's': placeholder})
    unsigned_order = AttributeDict({
        'trader': acc.address,
        'baseTokenAmount': bta * MAGNITUDE,
        'quoteTokenAmount': qta * MAGNITUDE,
        'gasTokenAmount': gta * MAGNITUDE,
        'data': HexBytes(data.hex()),
        'signature': sig,
    })

    sh, bh, oh = ym.functions.getBQODHash(unsigned_order, orderset).call()
    signed = acc.signHash(oh)
    sigc = ym.functions.getConfigSignature(
        HexBytes(signed['v']), HexBytes(signed['r']), HexBytes(signed['s']), 1).call()

    # Replace the placeholder with the real (config-packed) signature.
    real_sig = AttributeDict({
        'config': HexBytes(sigc[0].hex()),
        'r': HexBytes(sigc[1].hex()),
        's': HexBytes(sigc[2].hex()),
    })
    return AttributeDict({
        'trader': acc.address,
        'baseTokenAmount': bta * MAGNITUDE,
        'quoteTokenAmount': qta * MAGNITUDE,
        'gasTokenAmount': gta * MAGNITUDE,
        'data': HexBytes(data.hex()),
        'signature': real_sig,
    })
def get_transfer_hash_event(
    event_name: str, transfer_hash: Hash32, transaction_hash: Hash32
) -> AttributeDict:
    """Build a minimal event record carrying a `transferHash` argument."""
    event_args = AttributeDict({"transferHash": HexBytes(transfer_hash)})
    return AttributeDict({
        "event": event_name,
        "transactionHash": HexBytes(transaction_hash),
        "logIndex": 0,
        "args": event_args,
    })
def test_admin_node_info(self, w3: "Web3") -> None:
    """`geth.admin.node_info` must return at least the documented keys."""
    result = w3.geth.admin.node_info()
    expected = AttributeDict({
        'id': '',
        'name': '',
        'enode': '',
        'ip': '',
        'ports': AttributeDict({}),
        'listenAddr': '',
        'protocols': AttributeDict({}),
    })
    # Test that result gives at least the keys that are listed in `expected`
    assert set(expected.keys()).issubset(result.keys())
def test_admin_nodeInfo(self, w3: "Web3") -> None:
    """Deprecated `nodeInfo` alias still works and warns."""
    with pytest.warns(DeprecationWarning):
        result = w3.geth.admin.nodeInfo()
    expected = AttributeDict({
        'id': '',
        'name': '',
        'enode': '',
        'ip': '',
        'ports': AttributeDict({}),
        'listenAddr': '',
        'protocols': AttributeDict({}),
    })
    # Test that result gives at least the keys that are listed in `expected`
    assert set(expected) <= set(result)
def test_signing(block, private_key):
    """A block re-signed with `private_key` must resolve to that key's address."""
    canonical = get_canonicalized_block(block)
    signature = calculate_block_signature(canonical, private_key)
    resigned = AttributeDict(
        merge(canonical, {"signature": signature.to_bytes()}))
    expected_proposer = private_key.public_key.to_canonical_address()
    assert get_proposer(resigned) == expected_proposer
def _parse_logs(self, txn_receipt, errors):
    """Yield decoded event logs from `txn_receipt`.

    Logs that fail ABI decoding are handled according to the `errors` flag:
    discarded, annotated and yielded, re-raised, or warned about and dropped.
    """
    # `errors` must be an EventLogErrorFlags member (i.e. expose `.name`).
    if not hasattr(errors, 'name'):
        raise AttributeError(f'Error flag must be one of: {EventLogErrorFlags.flag_options()}')
    for log in txn_receipt['logs']:
        try:
            rich_log = get_event_data(self.web3.codec, self.abi, log)
        except (MismatchedABI, LogTopicError, InvalidEventABI, TypeError) as e:
            if errors == STRICT:
                raise e
            if errors == DISCARD:
                continue
            if errors == IGNORE:
                # Yield the raw log, annotated with the decoding error.
                annotated = MutableAttributeDict(log)
                annotated['errors'] = e
                rich_log = AttributeDict(annotated)
            else:
                warnings.warn(
                    f'The log with transaction hash: {log.transactionHash} and '
                    f'logIndex: {log.logIndex} encountered the following error '
                    f'during processing: {type(e).__name__}({e}). It has been discarded.'
                )
                continue
        yield rich_log
def fix_web3_keys(block):
    """Mirror the snake_case block fields onto the camelCase names web3 expects."""
    camel_case_fields = {
        "receiptsRoot": HexBytes(block["receipts_root"]),
        "logsBloom": block["logs_bloom"],
    }
    return AttributeDict({**block, **camel_case_fields})
def get_transfer_event(transaction_hash: Hash32) -> AttributeDict:
    """Build a minimal transfer event record for `transaction_hash`."""
    return AttributeDict({
        "event": TRANSFER_EVENT_NAME,
        "transactionHash": HexBytes(transaction_hash),
        "logIndex": 0,
    })
def getTransaction(self, tx):
    """Return a canned transaction record for hash `tx` (test double)."""
    return AttributeDict({
        'hash': tx,
        # Hex encoded ascii string "mocked"
        'input': '0x6d6f636b6564',
        'blockHash': '0x123',
        'from': '0x777',
    })
def get_block_with_aura_fields(self, block):
    """Augment `block` with Parity/Aura consensus fields plus a signature."""
    parity_fields = {
        "author": block["miner"],
        "sealFields": [b"", b""],
        "step": block["timestamp"],  # use step duration of 1 second
    }
    augmented = AttributeDict({**block, **parity_fields})
    signature = self._get_signature_for_block(augmented)
    return {**block, **parity_fields, "signature": signature}
def to_dict(cls, attr_dict: AttributeDict) -> JSONLike:
    """Simplify an AttributeDict to a plain dict, stripping HexBytes values."""
    if not isinstance(attr_dict, AttributeDict):
        raise ValueError("No AttributeDict provided.")  # pragma: nocover
    return {
        cls._valid_key(key): cls._remove_hexbytes(value)
        for key, value in attr_dict.items()
    }
def from_dict(cls, di: JSONLike) -> AttributeDict:
    """Reconstruct an AttributeDict from a plain dict, restoring HexBytes values."""
    if not isinstance(di, dict):
        raise ValueError("No dict provided.")  # pragma: nocover
    return AttributeDict({
        cls._valid_key(key): cls._add_hexbytes(value)
        for key, value in di.items()
    })
def dict_attribute(self, data: dict) -> AttributeDict:
    """Recursively convert `data` into an AttributeDict.

    Fix: the original mutated the caller's dict in place while converting;
    this version builds a fresh dict so the input is left untouched. The
    returned value is identical to before.

    Strings, nested dicts and lists are converted via the matching helper;
    any other value is carried over as-is.
    """
    converted = {}
    for key, value in data.items():
        if isinstance(value, str):
            converted[key] = self.str_attribute(value)
        elif isinstance(value, dict):
            converted[key] = self.dict_attribute(value)
        elif isinstance(value, list):
            converted[key] = self.list_attribute(value)
        else:
            converted[key] = value
    return AttributeDict(converted)
def new_ddo(account, web3, name, ddo=None):
    """Create a DDO fixture owned by `account`, backed by a freshly deployed datatoken."""
    doc = ddo if ddo else ddo_event_sample.copy()
    if not doc.get('publicKey'):
        doc['publicKey'] = [{'owner': ''}]
    doc['publicKey'][0]['owner'] = account.address
    # Randomize so repeated fixtures never collide.
    doc['random'] = str(uuid.uuid4())
    token_address = deploy_datatoken(web3, account.privateKey, name, name, account.address)
    doc['id'] = new_did(token_address)
    doc['dataToken'] = token_address
    return AttributeDict(doc)
def test_attributedict_dict_in_list_in_dict():
    """A dict nested inside a list inside a dict is converted recursively."""
    instructions = [
        0,
        1,
        'neither shalt thou count, excepting that thou then proceedeth to three',
        {'if_naughty': 'snuff it'},
        'shalt thou not count',
        'right out',
    ]
    attrdict = AttributeDict.recursive({'instructions': instructions})
    assert attrdict.instructions[3].if_naughty == 'snuff it'
def middleware(method, params):
    """Wrap plain-dict results from `make_request` in AttributeDict; pass others through."""
    response = make_request(method, params)
    if 'result' not in response:
        return response
    result = response['result']
    # Already-wrapped or non-dict results are returned untouched.
    if not is_dict(result) or isinstance(result, AttributeDict):
        return response
    return assoc(response, 'result', AttributeDict.recursive(result))
def test_produce_mpe_events_from_blockchain(self, mock_get_current_block_no,
                                            mock_last_block_number,
                                            mock_get_contract_instance):
    """MPE, Airdrop and Occam producers must all surface the mocked DepositFunds event.

    Fix: removed the unused local ``event_repository`` and deduplicated the
    DepositFunds event literal that was repeated four times.
    """
    mpe_event_producer = MPEEventProducer(infura_endpoint, Repository(NETWORKS))

    # Canned DepositFunds log entry served by the mocked event filter.
    deposit_funds_event = AttributeDict({
        'args': AttributeDict({'sender': '0xabd2cCb3828b4428bBde6C2031A865b0fb272a5A',
                               'amount': 30000000}),
        'event': 'DepositFunds',
        'logIndex': 1,
        'transactionIndex': 18,
        'transactionHash': HexBytes('0x562cc2fa59d9c7a4aa56106a19ad9c8078a95ae68416619fc191d86c50c91f12'),
        'address': '0x8FB1dC8df86b388C7e00689d1eCb533A160B4D0C',
        'blockHash': HexBytes('0xe06042a4d471351c0ee9e50056bd4fb6a0e158b2489ba70775d3c06bd29da19b'),
        'blockNumber': 6286405})

    deposit_fund_event_object = Mock()
    deposit_fund_event_object.createFilter = Mock(
        return_value=Mock(get_all_entries=Mock(return_value=[deposit_funds_event])))
    mock_last_block_number.return_value = 50
    mock_get_current_block_no.return_value = 50

    # Testing contract events
    mock_get_contract_instance.return_value = Mock(
        events=Mock(DepositFunds=deposit_fund_event_object,
                    abi=[{"type": "event", "name": "DepositFunds"}]))
    blockchain_events = mpe_event_producer.produce_event(3)
    assert blockchain_events == [deposit_funds_event]

    # Testing Airdrop events
    airdrop_event_producer = AirdropEventProducer(infura_endpoint, Repository(NETWORKS))
    blockchain_events = airdrop_event_producer.produce_event(3)
    assert blockchain_events == [deposit_funds_event]

    # Testing Occam Airdrop events
    occam_airdrop_event_producer = OccamAirdropEventProducer(infura_endpoint, Repository(NETWORKS))
    blockchain_events = occam_airdrop_event_producer.produce_event(3)
    assert blockchain_events == [deposit_funds_event]
def new_ddo(account, web3, name, ddo=None):
    """Create a v4 DDO fixture owned by `account`, backed by a freshly deployed datatoken."""
    doc = ddo if ddo else ddo_event_sample_v4.copy()
    if not doc.get("publicKey"):
        doc["publicKey"] = [{"owner": ""}]
    doc["publicKey"][0]["owner"] = account.address
    # Randomize so repeated fixtures never collide.
    doc["random"] = str(uuid.uuid4())
    token_address = deploy_datatoken(web3, account, name, name)
    chain_id = web3.eth.chain_id
    doc["id"] = make_did(token_address, chain_id)
    doc["chainId"] = chain_id
    doc["nftAddress"] = token_address
    return AttributeDict(doc)
def test_parse_rewards_no_fee(web3, eth_bridge_contract):
    """With a 0% fee a single cross-transfer event yields one full-amount deposit."""
    # Fees are updated to flat fee
    transfer_event = AttributeDict({
        'address': '0xC0E7A7FfF4aBa5e7286D5d67dD016B719DCc9156',
        'args': {
            '_amount': 24000000000000000,
            '_calculatedDecimals': 18,
            '_calculatedGranularity': 1,
            '_decimals': 18,
            '_formattedAmount': 24000000000000000,
            '_granularity': 1,
            '_to': '0xC855FD4aF3526215d37b39Cc33fa3C352d42e6F8',
            '_tokenAddress': '0xa1F7EfD2B12aBa416f1c57b9a54AC92B15C3A792',
            '_userData': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                         b'\x02c\x8d$\xe6\x96\xc7\xff\xf8\xed\x05`:\xfcU\xc5'
                         b'{\xf2\xc7\xf0'
        },
        'blockHash': HexBytes(
            '0x0a4b2f06dc5cad428fe869a3222f548b4929862f8342c0c802b5e62a5e1c671c'
        ),
        'blockNumber': 1854123,
        'event': 'AcceptedCrossTransfer',
        'logIndex': 6,
        'transactionHash': HexBytes(
            '0x5edd6194a4caa53e5fb0e7ec1e2396486eb0b25274728832a9b5819c6faa58c7'
        ),
        'transactionIndex': 7,
    })
    deposits = parse_deposits_from_events(
        web3=web3,
        bridge_contract=eth_bridge_contract,
        events=[transfer_event],
        fee_percentage=Decimal('0.0'),
    )
    assert len(deposits) == 1
    # 24000000000000000 wei-scale units == 0.024 tokens with no fee taken.
    assert deposits[0].amount_decimal == Decimal('0.024')
def test_check_metadata_proofs(monkeypatch):
    """Exercise proof validation across ALLOWED_VALIDATORS configurations."""
    # empty env var => everything is validated
    monkeypatch.delenv("ALLOWED_VALIDATORS", None)
    assert check_metadata_proofs(None, "whatever_it_works")

    # wrong env var => nothing is validated
    monkeypatch.setenv("ALLOWED_VALIDATORS", "not a json")
    assert not check_metadata_proofs(None, "whatever_it_works")

    config_file = app.config["AQUARIUS_CONFIG_FILE"]
    web3 = setup_web3(config_file)
    random_addresses = [web3.eth.account.create().address for _ in range(5)]
    random_dicts = [
        AttributeDict({"args": AttributeDict({"validator": address})})
        for address in random_addresses
    ]

    # Only the first two addresses are allowed validators.
    monkeypatch.setenv(
        "ALLOWED_VALIDATORS", json.dumps([random_addresses[0], random_addresses[1]])
    )
    assert check_metadata_proofs(web3, [random_dicts[0]])
    assert check_metadata_proofs(web3, [random_dicts[1]])
    assert not check_metadata_proofs(web3, [random_dicts[2]])
    assert not check_metadata_proofs(web3, [random_dicts[2], random_dicts[3]])
    assert check_metadata_proofs(web3, [random_dicts[0], random_dicts[3]])
    assert check_metadata_proofs(web3, [random_dicts[0], random_dicts[0]])

    # no metadata proofs set
    assert not check_metadata_proofs(web3, [])
    assert not check_metadata_proofs(web3, [])

    # no validators set
    monkeypatch.setenv("ALLOWED_VALIDATORS", json.dumps([]))
    assert check_metadata_proofs(web3, [random_dicts[4]])
def test_deposit_with_right_amount_logs_event(testenv):
    """A successful deposit emits exactly one correctly populated Deposit event."""
    testenv.register_all_depositors()
    depositor_count = len(testenv.depositors)
    testenv.deposit(3456, depositor_count * 3456)

    events = testenv.deposit_locker.events.Deposit.createFilter(
        fromBlock=0
    ).get_all_entries()

    assert len(events) == 1
    assert events[0].args == AttributeDict(
        {
            "totalValue": 3456 * depositor_count,
            "valuePerDepositor": 3456,
            "numberOfDepositors": depositor_count,
        }
    )
def main():
    """Print a console snapshot report of the MakerDAO system state."""
    data = AttributeDict.recursive(fetch_data())

    print('The Fundamental Equation of Dai')
    print(f"{data.eth_ilk.Art * data.eth_ilk.rate:,.0f} + {data.bat_ilk.Art * data.bat_ilk.rate:,.0f} + {data.sai_ilk.Art:,.0f} + {data.vice:,.0f} = {data.debt:,.0f}")
    print('(Dai from ETH + Dai from BAT + Dai from Sai + System Debt) = Total Dai')
    print()
    print(f'Total Dai: {data.debt:,.0f}')
    print(f'Total Sai: {data.sai_supply:,.0f}')
    print(f'Dai + Sai: {data.debt + data.sai_supply:,.0f}')
    print(f'Total Chai: {data.chai_supply:,.0f}')
    print()
    print(f'Dai from ETH: {data.eth_ilk.Art * data.eth_ilk.rate:,.0f} ({data.eth_ilk.Art * data.eth_ilk.rate / data.debt:.2%})')
    print(f'Dai from BAT: {data.bat_ilk.Art * data.bat_ilk.rate:,.0f} ({data.bat_ilk.Art * data.bat_ilk.rate / data.debt:.2%})')
    print(f'Dai from SAI: {data.sai_ilk.Art * data.sai_ilk.rate:,.0f} ({data.sai_ilk.Art * data.sai_ilk.rate / data.debt:.2%})')
    print()
    print(f'ETH Locked: {data.eth_locked:,.0f}')  # eth_supply missing
    print(f'ETH Ceiling: {data.eth_ilk.line:,.0f} Dai ({data.eth_ilk.Art * data.eth_ilk.rate / data.eth_ilk.line:.2%} util.)')
    print(f'ETH Stability Fee: {data.eth_fee:.2f}%')
    print()
    print(f'BAT Locked: {data.bat_locked:,.0f} ({data.bat_locked / data.bat_supply:.2%} supply)')
    print(f'BAT Ceiling: {data.bat_ilk.line:,.0f} Dai ({data.bat_ilk.Art * data.bat_ilk.rate / data.bat_ilk.line:.2%} util.)')
    print(f'BAT Stability Fee: {data.bat_fee:.2f}%')
    print()
    print(f'Dai (ERC20) Supply: {data.dai_supply:,.0f} ({data.dai_supply / data.debt:.2%})')
    print(f'Dai in DSR: {data.savings_dai:,.0f} ({data.savings_dai / data.debt:.2%})')
    print(f'Pie in DSR: {data.savings_pie:,.0f}')
    print(f'Dai Savings Rate: {data.pot_fee:.2f}%')
    print()
    print(f'ETH Price: ${data.eth_price:,.2f}')
    print(f'BAT Price: ${data.bat_price:,.4f}')
    print(f'Collat. Ratio: {data.sys_locked / data.debt:,.2%}')
    print(f'Total Locked: ${data.sys_locked:,.0f}')
    print()
    print(f'System Surplus: {data.sys_surplus:,.0f} Dai')
    print(f'Surplus Buffer: {data.surplus_buffer:,.0f}')
    print()
    print(f'Debt available to heal: {data.sys_debt:,.0f} Dai')
    print(f'Debt Buffer: {data.debt_size:,.0f}')
    print()
    print(f'Vaults Opened: {data.cdps:,d}')
    print()
    print(f'ETH Vault Auctions: {data.eth_kicks:,d}')
    print(f'BAT Vault Auctions: {data.bat_kicks:,d}')
    print()
    print(f'MKR Supply: {data.mkr_supply:,.2f}')
    print(f'MKR in Burner: {data.gem_pit:,.2f}')
    print()
    print(f'Dai in Uniswap: {data.uniswap_dai:,.0f}')
def test_attributedict_sequence_with_dict(sequence):
    """A dict nested inside any sequence type is converted recursively."""
    converted = AttributeDict.recursive(sequence(['a', {'found': True}, 'c']))
    assert converted[1].found is True
def test_attributedict_setattr_invalid():
    """Attribute assignment is rejected and the stored value is unchanged."""
    frozen = AttributeDict({'a': 1})
    with pytest.raises(TypeError):
        frozen.a = 0
    assert frozen.a == 1
def test_attributedict_set_in_recursive_dict():
    """Sets nested inside dicts survive recursive conversion."""
    converted = AttributeDict.recursive({'mydict': {'myset': {'found'}}})
    assert 'found' in converted.mydict.myset
def get_event_data(event_abi, log_entry):
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    # Work out which topics carry argument data; the first topic of a
    # non-anonymous event is the event signature and must match the ABI.
    if event_abi['anonymous']:
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        log_topics = log_entry['topics'][1:]

    # Indexed inputs arrive via topics.
    topics_abi = get_indexed_event_inputs(event_abi)
    topics_inputs = normalize_event_input_types(topics_abi)
    log_topic_types = get_event_abi_types_for_decoding(topics_inputs)
    log_topic_names = get_abi_input_names({'inputs': topics_abi})

    if len(log_topics) != len(log_topic_types):
        raise ValueError("Expected {0} log topics. Got {1}".format(
            len(log_topic_types),
            len(log_topics),
        ))

    # Non-indexed inputs arrive packed in the data field.
    log_data = hexstr_if_str(to_bytes, log_entry['data'])
    data_abi = exclude_indexed_event_inputs(event_abi)
    data_inputs = normalize_event_input_types(data_abi)
    log_data_types = get_event_abi_types_for_decoding(data_inputs)
    log_data_names = get_abi_input_names({'inputs': data_abi})

    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(log_topic_names).intersection(log_data_names)
    if duplicate_names:
        raise ValueError(
            "Invalid Event ABI: The following argument names are duplicated "
            "between event inputs: '{0}'".format(', '.join(duplicate_names))
        )

    decoded_log_data = decode_abi(log_data_types, log_data)
    normalized_log_data = map_abi_data(
        BASE_RETURN_NORMALIZERS, log_data_types, decoded_log_data)
    decoded_topic_data = [
        decode_single(topic_type, topic_data)
        for topic_type, topic_data in zip(log_topic_types, log_topics)
    ]
    normalized_topic_data = map_abi_data(
        BASE_RETURN_NORMALIZERS, log_topic_types, decoded_topic_data)

    event_args = dict(itertools.chain(
        zip(log_topic_names, normalized_topic_data),
        zip(log_data_names, normalized_log_data),
    ))
    return AttributeDict.recursive({
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    })
def test_attributedict_recursive_dict():
    """Nested dicts become chained attribute access after recursive conversion."""
    wrapped = AttributeDict.recursive({'x': {'y': {'z': 8}}})
    assert wrapped.x.y.z == 8
class EventFilterBuilder:
    """Incrementally assembles the parameters of an event log filter.

    Each filter parameter (fromBlock, toBlock, address) is write-once; after
    `deploy` the builder and all argument filters are frozen.
    """

    formatter = None
    _fromBlock = None
    _toBlock = None
    _address = None
    _immutable = False

    def __init__(self, event_abi, formatter=None):
        self.event_abi = event_abi
        self.formatter = formatter
        self.event_topic = initialize_event_topics(self.event_abi)
        self.args = AttributeDict(
            _build_argument_filters_from_event_abi(event_abi))
        # Argument order as declared in the ABI, used for positional matching.
        self._ordered_arg_names = tuple(
            arg['name'] for arg in event_abi['inputs'])

    @property
    def fromBlock(self):
        return self._fromBlock

    @fromBlock.setter
    def fromBlock(self, value):
        if self._fromBlock is not None or self._immutable:
            raise ValueError(
                "fromBlock is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self._fromBlock))
        self._fromBlock = value

    @property
    def toBlock(self):
        return self._toBlock

    @toBlock.setter
    def toBlock(self, value):
        if self._toBlock is not None or self._immutable:
            raise ValueError(
                "toBlock is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self._toBlock))
        self._toBlock = value

    @property
    def address(self):
        return self._address

    @address.setter
    def address(self, value):
        if self._address is not None or self._immutable:
            raise ValueError(
                "address is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self.address))
        self._address = value

    @property
    def ordered_args(self):
        return tuple(self.args[name] for name in self._ordered_arg_names)

    @property
    @to_tuple
    def indexed_args(self):
        return tuple(arg for arg in self.ordered_args if is_indexed(arg))

    @property
    @to_tuple
    def data_args(self):
        return tuple(arg for arg in self.ordered_args if is_not_indexed(arg))

    @property
    def topics(self):
        arg_topics = tuple(arg.match_values for arg in self.indexed_args)
        return normalize_topic_list(cons(to_hex(self.event_topic), arg_topics))

    @property
    def data_argument_values(self):
        if self.data_args is None:
            return (None,)
        return tuple(arg.match_values for arg in self.data_args)

    @property
    def filter_params(self):
        params = {
            "topics": self.topics,
            "fromBlock": self.fromBlock,
            "toBlock": self.toBlock,
            "address": self.address,
        }
        # Drop any parameter that was never set.
        return valfilter(lambda value: value is not None, params)

    def deploy(self, w3):
        """Freeze this builder and install the filter on the given web3 instance."""
        if not isinstance(w3, web3.Web3):
            raise ValueError("Invalid web3 argument: got: {0}".format(repr(w3)))

        # Freeze the builder and every argument filter before deployment.
        for arg in self.args.values():
            arg._immutable = True
        self._immutable = True

        log_filter = w3.eth.filter(self.filter_params)
        log_filter.filter_params = self.filter_params
        log_filter.set_data_filters(self.data_argument_values)
        log_filter.builder = self
        if self.formatter is not None:
            log_filter.log_entry_formatter = self.formatter
        return log_filter