def test_get_formatters_default_formatter_for_falsy_config():
    method = Method(
        mungers=[],
        json_rpc_method='eth_method',
        formatter_lookup_fn='',
    )

    default_input_formatters, default_output_formatters = method.get_formatters('')

    assert pipe(['a', 'b', 'c'], *default_input_formatters) == ['a', 'b', 'c']
    assert pipe(['a', 'b', 'c'], *default_output_formatters) == ['a', 'b', 'c']
def build(obj: Any, *applicators: Callable[..., Any]) -> Any:
    """
    Run the provided object through the series of applicator functions.

    If ``obj`` is an instance of :class:`~eth.chains.base.BaseChain`, the
    applicators will be run on a copy of the chain and thus will not mutate
    the provided chain instance.
    """
    if isinstance(obj, ChainAPI):
        return pipe(obj, copy(), *applicators)
    else:
        return pipe(obj, *applicators)
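# Hedged usage sketch, assuming only `toolz`: `pipe` threads a value through the
# applicators in order, so for a non-chain object `build(obj, f, g)` behaves like
# `g(f(obj))`. The applicators below are toy functions, not part of the original API.
from toolz import pipe

assert pipe(3, lambda x: x + 1, lambda x: x * 2) == 8
assert pipe("genesis", str.upper, lambda s: s + "!") == "GENESIS!"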
def apply_error_formatters(
    error_formatters: Callable[..., Any],
    response: Optional[RPCResponse] = None,
    params: Optional[Any] = None,
) -> RPCResponse:
    if 'error' in response and error_formatters:
        formatted_response = pipe(response, error_formatters)
        return formatted_response
    elif 'result' in response and response['result'] is None and error_formatters:
        formatted_response = pipe(params, error_formatters)
        return formatted_response
    else:
        return response
def process_bytecode(link_refs: Dict[str, Any], bytecode: bytes) -> HexStr:
    """
    Replace link_refs in bytecode with 0's.
    """
    all_offsets = [y for x in link_refs.values() for y in x.values()]
    # Link ref validation.
    validate_link_ref_fns = (
        validate_link_ref(ref["start"] * 2, ref["length"] * 2)
        for ref in concat(all_offsets)
    )
    pipe(bytecode, *validate_link_ref_fns)
    # Convert link_refs in bytecode to 0's
    link_fns = (
        replace_link_ref_in_bytecode(ref["start"] * 2, ref["length"] * 2)
        for ref in concat(all_offsets)
    )
    processed_bytecode = pipe(bytecode, *link_fns)
    return add_0x_prefix(processed_bytecode)
def _apply_request_formatters(
    params: Any,
    request_formatters: Dict[RPCEndpoint, Callable[..., TReturn]],
) -> Any:
    if request_formatters:
        formatted_params = pipe(params, request_formatters)
        return formatted_params
    return params
def test_chain_builder_initialize_chain_default(chain_class):
    chain = pipe(
        chain_class,
        genesis(),
    )

    header = chain.get_canonical_head()
    assert header == chain.get_canonical_block_by_number(0).header

    assert header.parent_hash == constants.GENESIS_PARENT_HASH
    assert header.uncles_hash == constants.EMPTY_UNCLE_HASH
    assert header.coinbase == constants.GENESIS_COINBASE
    assert header.state_root == constants.BLANK_ROOT_HASH
    assert header.transaction_root == constants.BLANK_ROOT_HASH
    assert header.receipt_root == constants.BLANK_ROOT_HASH
    assert header.bloom == 0
    assert header.difficulty == 1
    assert header.block_number == constants.GENESIS_BLOCK_NUMBER
    assert header.gas_limit == constants.GENESIS_GAS_LIMIT
    assert header.gas_used == 0
    # Account for runtime: the test should finish in well under a few seconds,
    # so the genesis timestamp should be effectively "now".
    assert abs(header.timestamp - time.time()) < 2
    assert header.extra_data == constants.GENESIS_EXTRA_DATA
    assert header.mix_hash == constants.GENESIS_MIX_HASH
    assert header.nonce == constants.GENESIS_NONCE
def test_chain_builder_initialize_chain_with_params(chain_class):
    chain = pipe(
        chain_class,
        genesis(params={'difficulty': 12345}),
    )

    header = chain.get_canonical_head()
    assert header == chain.get_canonical_block_by_number(0).header

    assert header.difficulty == 12345
def serialize_full_transaction(transaction, block, transaction_index, is_pending):
    if is_pending:
        block_number = None
        block_hash = None
        transaction_index = None
    else:
        block_number = block['number']
        block_hash = block['hash']

    serialized_transaction = pipe(
        transaction,
        partial(assoc, key='block_number', value=block_number),
        partial(assoc, key='block_hash', value=block_hash),
        partial(assoc, key='transaction_index', value=transaction_index),
        partial(assoc, key='type', value=extract_transaction_type(transaction)),
    )

    if 'gas_price' in transaction:
        return serialized_transaction
    else:
        # TODO: Sometime in 2022 the inclusion of gas_price may be removed from
        # dynamic fee transactions and we can get rid of this behavior.
        # https://github.com/ethereum/execution-specs/pull/251
        gas_price = (
            transaction['max_fee_per_gas']
            if is_pending
            else calculate_effective_gas_price(transaction, block)
        )
        return assoc(serialized_transaction, 'gas_price', gas_price)
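# Minimal sketch of the assoc-in-a-pipe pattern above, assuming only `toolz`:
# each `partial(assoc, key=..., value=...)` stage returns a new dict with one
# extra key, so the input transaction dict is never mutated.
from functools import partial

from toolz import assoc, pipe

tx = {'nonce': 0}
serialized = pipe(
    tx,
    partial(assoc, key='block_number', value=1),
    partial(assoc, key='block_hash', value='0xabc'),
)
assert serialized == {'nonce': 0, 'block_number': 1, 'block_hash': '0xabc'}
assert tx == {'nonce': 0}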
def apply_error_formatters(
    error_formatters: Callable[..., Any],
    response: RPCResponse,
) -> RPCResponse:
    if 'error' in response and error_formatters:
        formatted_response = pipe(response, error_formatters)
        return formatted_response
    else:
        return response
def find_matching_fn_abi(abi, abi_codec, fn_identifier=None, args=None, kwargs=None):
    args = args or tuple()
    kwargs = kwargs or dict()
    num_arguments = len(args) + len(kwargs)

    if fn_identifier is FallbackFn:
        return get_fallback_func_abi(abi)

    if not is_text(fn_identifier):
        raise TypeError("Unsupported function identifier")

    name_filter = functools.partial(filter_by_name, fn_identifier)
    arg_count_filter = functools.partial(filter_by_argument_count, num_arguments)
    encoding_filter = functools.partial(filter_by_encodability, abi_codec, args, kwargs)

    function_candidates = pipe(abi, name_filter, arg_count_filter, encoding_filter)

    if len(function_candidates) == 1:
        return function_candidates[0]
    else:
        matching_identifiers = name_filter(abi)
        matching_function_signatures = [
            abi_to_signature(func) for func in matching_identifiers
        ]

        arg_count_matches = len(arg_count_filter(matching_identifiers))
        encoding_matches = len(encoding_filter(matching_identifiers))

        if arg_count_matches == 0:
            diagnosis = "\nFunction invocation failed due to improper number of arguments."
        elif encoding_matches == 0:
            diagnosis = "\nFunction invocation failed due to no matching argument types."
        elif encoding_matches > 1:
            diagnosis = (
                "\nAmbiguous argument encoding. "
                "Provided arguments can be encoded to multiple functions matching this call."
            )

        message = (
            "\nCould not identify the intended function with name `{name}`, "
            "positional argument(s) of type `{arg_types}` and "
            "keyword argument(s) of type `{kwarg_types}`."
            "\nFound {num_candidates} function(s) with the name `{name}`: {candidates}"
            "{diagnosis}"
        ).format(
            name=fn_identifier,
            arg_types=tuple(map(type, args)),
            kwarg_types=valmap(type, kwargs),
            num_candidates=len(matching_identifiers),
            candidates=matching_function_signatures,
            diagnosis=diagnosis,
        )

        raise ValidationError(message)
def apply_error_formatters(error_formatters, response):
    if 'error' in response and error_formatters:
        formatted_response = pipe(response, error_formatters)
        return formatted_response
    else:
        return response
def process_params(self, module, *args, **kwargs):
    # Takes in input params, steps 1-3.
    params, method, (req_formatters, ret_formatters) = _pipe_and_accumulate(
        (module, args, kwargs,),
        [self.input_munger, self.method_selector_fn, self.get_formatters],
    )
    return (method, pipe(params, *req_formatters)), ret_formatters
def apply_result_formatters(
    result_formatters: Callable[..., Any],
    result: RPCResponse,
) -> RPCResponse:
    if result_formatters:
        formatted_result = pipe(result, result_formatters)
        return formatted_result
    else:
        return result
def map_abi_data(
    normalizers: Sequence[Callable[[TypeStr, Any], Tuple[TypeStr, Any]]],
    types: Sequence[TypeStr],
    data: Sequence[Any],
) -> Any:
    """
    This function will apply normalizers to your data, in the context of the
    relevant types. Each normalizer is in the format:

        def normalizer(datatype, data):
            # Conditionally modify data
            return (datatype, data)

    Where datatype is a valid ABI type string, like "uint".

    In the case of an array, like "bool[2]", the normalizer will receive `data`
    as an iterable of typed data, like `[("bool", True), ("bool", False)]`.

    Internals
    ---------
    This is accomplished by:

    1. Decorating the data tree with types
    2. Recursively mapping each of the normalizers to the data
    3. Stripping the types back out of the tree
    """
    pipeline = itertools.chain(
        [abi_data_tree(types)],
        map(data_tree_map, normalizers),
        [partial(recursive_map, strip_abi_type)],
    )

    return pipe(data, *pipeline)
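# Hedged sketch of the normalizer contract described in the docstring above.
# `lowercase_address` is a hypothetical normalizer used only to show the
# expected (datatype, data) -> (datatype, data) shape; it is not part of
# web3's API and is not run through the real map_abi_data here.
def lowercase_address(datatype, data):
    # Conditionally modify data: leave non-address types untouched.
    if datatype == "address" and isinstance(data, str):
        return datatype, data.lower()
    return datatype, data


assert lowercase_address("uint256", 1) == ("uint256", 1)
assert lowercase_address("address", "0xABCD") == ("address", "0xabcd")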
def find_matching_fn_abi(
    abi: ABI,
    abi_codec: ABICodec,
    fn_identifier: Optional[Union[str, Type[FallbackFn], Type[ReceiveFn]]] = None,
    args: Optional[Sequence[Any]] = None,
    kwargs: Optional[Any] = None,
) -> ABIFunction:
    args = args or tuple()
    kwargs = kwargs or dict()
    num_arguments = len(args) + len(kwargs)

    if fn_identifier is FallbackFn:
        return get_fallback_func_abi(abi)

    if fn_identifier is ReceiveFn:
        return get_receive_func_abi(abi)

    if not is_text(fn_identifier):
        raise TypeError("Unsupported function identifier")

    name_filter = functools.partial(filter_by_name, fn_identifier)
    arg_count_filter = functools.partial(filter_by_argument_count, num_arguments)
    encoding_filter = functools.partial(filter_by_encodability, abi_codec, args, kwargs)

    function_candidates = pipe(abi, name_filter, arg_count_filter, encoding_filter)

    if len(function_candidates) == 1:
        return function_candidates[0]
    else:
        matching_identifiers = name_filter(abi)
        matching_function_signatures = [
            abi_to_signature(func) for func in matching_identifiers
        ]

        arg_count_matches = len(arg_count_filter(matching_identifiers))
        encoding_matches = len(encoding_filter(matching_identifiers))

        if arg_count_matches == 0:
            diagnosis = "\nFunction invocation failed due to improper number of arguments."
        elif encoding_matches == 0:
            diagnosis = "\nFunction invocation failed due to no matching argument types."
        elif encoding_matches > 1:
            diagnosis = (
                "\nAmbiguous argument encoding. "
                "Provided arguments can be encoded to multiple functions matching this call."
            )

        message = (
            f"\nCould not identify the intended function with name `{fn_identifier}`, positional "
            f"argument(s) of type `{tuple(map(type, args))}` and keyword argument(s) of type "
            f"`{valmap(type, kwargs)}`.\nFound {len(matching_identifiers)} function(s) with "
            f"the name `{fn_identifier}`: {matching_function_signatures}{diagnosis}"
        )

        raise ValidationError(message)
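# Generic sketch of the filter-chain idiom used above, assuming only the
# standard library and `toolz`: each stage is partially applied, then `pipe`
# narrows the candidates step by step. The toy ABI entries and filters below
# are hypothetical, not web3's real filter functions.
import functools

from toolz import pipe

toy_abi = [
    {"name": "transfer", "inputs": ["address", "uint256"]},
    {"name": "transfer", "inputs": ["address"]},
    {"name": "approve", "inputs": ["address", "uint256"]},
]


def toy_filter_by_name(name, abi):
    return [entry for entry in abi if entry["name"] == name]


def toy_filter_by_argument_count(count, abi):
    return [entry for entry in abi if len(entry["inputs"]) == count]


name_filter = functools.partial(toy_filter_by_name, "transfer")
arg_count_filter = functools.partial(toy_filter_by_argument_count, 2)

candidates = pipe(toy_abi, name_filter, arg_count_filter)
assert candidates == [{"name": "transfer", "inputs": ["address", "uint256"]}]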
def apply_error_formatters(
    error_formatters: Callable[..., Any],
    response: RPCResponse,
) -> RPCResponse:
    if error_formatters:
        formatted_resp = pipe(response, error_formatters)
        return formatted_resp
    else:
        return response
def test_chain_builder_initialize_chain_with_state_simple(chain_class):
    chain = pipe(
        chain_class,
        genesis(state=((ADDRESS_A, 'balance', 1),)),
    )

    header = chain.get_canonical_head()
    assert header == chain.get_canonical_block_by_number(0).header

    assert header.state_root != constants.BLANK_ROOT_HASH

    state = chain.get_vm().state
    assert state.get_balance(ADDRESS_A) == 1
def apply_null_result_formatters(
    null_result_formatters: Callable[..., Any],
    response: RPCResponse,
    params: Optional[Any] = None,
) -> RPCResponse:
    if null_result_formatters:
        formatted_resp = pipe(params, null_result_formatters)
        return formatted_resp
    else:
        return response
def apply_all_link_refs(
    bytecode: bytes,
    link_refs: List[Dict[str, Any]],
    attr_dict: Dict[str, str],
) -> bytes:
    """
    Applies all link references corresponding to a valid attr_dict to the bytecode.
    """
    if link_refs is None:
        return bytecode

    link_fns = (
        apply_link_ref(offset, ref["length"], attr_dict[ref["name"]])
        for ref in link_refs
        for offset in ref["offsets"]
    )
    linked_bytecode = pipe(bytecode, *link_fns)
    return linked_bytecode
def hash(self):
    """
    :returns: the hash of the encoded bytestring
    :rtype: ~hexbytes.main.HexBytes
    """
    return pipe(
        self,
        rlp.encode,
        keccak,
        HexBytes,
    )
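# The pipe above reads top-to-bottom and is equivalent to the nested call
# HexBytes(keccak(rlp.encode(self))). A minimal standalone sketch of the same
# idiom, assuming rlp, eth_utils, hexbytes, and toolz are installed:
import rlp
from eth_utils import keccak
from hexbytes import HexBytes
from toolz import pipe

digest = pipe(b"hello", rlp.encode, keccak, HexBytes)
assert digest == HexBytes(keccak(rlp.encode(b"hello")))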
def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
    # TODO: send call to eth-tester without gas, and remove guess_gas entirely
    if method == 'eth_call':
        filled_transaction = pipe(
            params[0],
            fill_default_from,
            fill_default_gas,
        )
        return make_request(method, [filled_transaction] + params[1:])
    elif method in (
        'eth_estimateGas',
        'eth_sendTransaction',
    ):
        filled_transaction = pipe(
            params[0],
            fill_default_from,
        )
        return make_request(method, [filled_transaction] + params[1:])
    else:
        return make_request(method, params)
def _ecpairing(data: BytesOrView) -> bool:
    # Fold the pairing checks for each 192-byte input segment into a single exponent.
    processing_pipeline = (
        _process_point(data[start_idx:start_idx + 192])
        for start_idx in range(0, len(data), 192)
    )
    exponent = pipe(bn128.FQ12.one(), *processing_pipeline)

    result = bn128.final_exponentiate(exponent) == bn128.FQ12.one()
    return result
def persistent(self) -> "HashTree":
    if not self.is_dirty():
        return self.original_hash_tree
    else:
        setters = (
            partial(set_chunk_in_tree, index=index, chunk=chunk)
            for index, chunk in self.updated_chunks.items()
        )
        appenders = (
            partial(append_chunk_to_tree, chunk=chunk)
            for chunk in self.appended_chunks
        )
        raw_hash_tree = pipe(self.original_hash_tree.raw_hash_tree, *setters, *appenders)
        return self.original_hash_tree.__class__(
            raw_hash_tree,
            self.original_hash_tree.chunk_count,
        )
def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
    if method in (
        'eth_call',
        'eth_estimateGas',
        'eth_sendTransaction',
    ):
        filled_transaction = pipe(
            params[0],
            fill_default_from,
        )
        return make_request(method, [filled_transaction] + list(params)[1:])
    else:
        return make_request(method, params)
def update_elements_in_chunk(
    original_chunk: Hash32,
    updated_elements: Dict[int, bytes],
) -> Hash32:
    """
    Update multiple elements in a chunk.

    The set of updates is given by a dictionary mapping indices to elements.
    The items of the dictionary will be passed one by one to
    `update_element_in_chunk`.
    """
    return pipe(
        original_chunk,
        *(
            functools.partial(update_element_in_chunk, index=index, element=element)
            for index, element in updated_elements.items()
        ),
    )
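# A hedged, self-contained sketch of the same pattern using plain bytes: each
# (index, element) pair becomes a partially applied updater, and `pipe` folds
# them over the chunk in dictionary order. `set_byte` is a hypothetical
# stand-in for `update_element_in_chunk`, not the real helper.
import functools

from toolz import pipe


def set_byte(chunk: bytes, index: int, element: bytes) -> bytes:
    # Replace a single byte at `index` with `element`.
    return chunk[:index] + element + chunk[index + 1:]


updates = {0: b"x", 3: b"y"}
updated = pipe(
    b"abcd",
    *(
        functools.partial(set_byte, index=index, element=element)
        for index, element in updates.items()
    ),
)
assert updated == b"xbcy"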
def input_munger(self, val):
    try:
        module, args, kwargs = val
    except TypeError:
        raise ValueError("input_munger expects a 3-tuple")
    # TODO: Create friendly error output.

    mungers_iter = iter(self.mungers)
    root_munger = next(mungers_iter)
    munged_inputs = pipe(
        root_munger(module, *args, **kwargs),
        *map(
            lambda m: _munger_star_apply(functools.partial(m, module)),
            mungers_iter,
        ),
    )

    return munged_inputs
def _prune_forward(self, root_id: TTaskID, depth: int) -> Tuple[TTaskID, ...]:
    """
    Prune all forks forward from the root
    """
    def prune_parent(prune_task_id: TTaskID) -> Set[TTaskID]:
        children = self._dependencies.pop(prune_task_id, set())
        del self._tasks[prune_task_id]
        if prune_task_id in self._declared_finished:
            self._declared_finished.remove(prune_task_id)
        return children

    prune_parent_list = compose(tuple, curry(mapcat)(prune_parent))
    prune_trunk = repeat(prune_parent_list, depth)
    return pipe((root_id,), *prune_trunk)
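# Hedged sketch of the repeated-application idiom above, assuming `repeat`
# behaves like `itertools.repeat`: unpacking `repeat(step, depth)` into `pipe`
# applies the same step function `depth` times. The toy tree below is
# hypothetical, not the real task/dependency structures.
from itertools import repeat

from toolz import pipe

children = {1: (2, 3), 2: (4,), 3: (), 4: ()}


def prune_level(node_ids):
    # Replace each node with its children, descending one level of the tree.
    return tuple(child for node_id in node_ids for child in children.get(node_id, ()))


assert pipe((1,), *repeat(prune_level, 2)) == (4,)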
def test_chain_builder_initialize_chain_with_state_multiple(chain_class):
    chain = pipe(
        chain_class,
        genesis(state=(
            (ADDRESS_A, 'balance', 1),
            (ADDRESS_B, 'balance', 2),
        )),
    )

    header = chain.get_canonical_head()
    assert header == chain.get_canonical_block_by_number(0).header

    assert header.state_root != constants.BLANK_ROOT_HASH

    account_db = chain.get_vm().state.account_db
    assert account_db.get_balance(ADDRESS_A) == 1
    assert account_db.get_balance(ADDRESS_B) == 2
def aggregate_votes(
    bitfield: Bitfield,
    sigs: Sequence[BLSSignature],
    voting_sigs: Sequence[BLSSignature],
    attesting_indices: Sequence[ValidatorIndex],
) -> Tuple[Bitfield, BLSSignature]:
    """
    Aggregate the votes.
    """
    # Update the bitfield and append the signatures
    sigs = tuple(sigs) + tuple(voting_sigs)
    bitfield = pipe(
        bitfield,
        *(
            set_voted(index=committee_index)
            for committee_index in attesting_indices
        ),
    )

    return bitfield, bls.aggregate_signatures(sigs)
def serialize_receipt(transaction, block, transaction_index, is_pending):
    if is_pending:
        block_number = None
        block_hash = None
        transaction_index = None
    else:
        block_number = block['number']
        block_hash = block['hash']

    return pipe(
        transaction,
        partial(assoc, key='block_number', value=block_number),
        partial(assoc, key='block_hash', value=block_hash),
        partial(assoc, key='transaction_index', value=transaction_index),
        partial(assoc, key='state_root', value=b'\x00'),
    )