Example #1
def post_process_compiled_contracts(compiled_contracts):
    for contract_data in compiled_contracts:
        bytecode = contract_data.get('bytecode')

        if is_string(bytecode):
            bytecode_placeholder_locations = find_placeholder_locations(bytecode)
            bytecode_link_references = normalize_placeholder_link_references(
                bytecode_placeholder_locations,
                compiled_contracts,
            )
        else:
            bytecode_link_references = tuple()

        bytecode_runtime = contract_data.get('bytecode_runtime')
        if is_string(bytecode_runtime):
            bytecode_runtime_placeholder_locations = find_placeholder_locations(
                bytecode_runtime,
            )
            bytecode_runtime_link_references = normalize_placeholder_link_references(
                bytecode_runtime_placeholder_locations,
                compiled_contracts,
            )
        else:
            bytecode_runtime_link_references = tuple()

        yield pipe(
            contract_data,
            partial(assoc, key='linkrefs', value=bytecode_link_references),
            partial(assoc, key='linkrefs_runtime', value=bytecode_runtime_link_references),
        )
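
A minimal, self-contained sketch of the pipe/partial(assoc, ...) pattern used above, with a made-up contract dict (assumes cytoolz is installed):

from functools import partial

from cytoolz.dicttoolz import assoc
from cytoolz.functoolz import pipe

contract_data = {'name': 'Math'}

# Each partial(assoc, key=..., value=...) is a one-argument function that
# returns a new dict with that key set; pipe threads the dict through them.
result = pipe(
    contract_data,
    partial(assoc, key='linkrefs', value=()),
    partial(assoc, key='linkrefs_runtime', value=()),
)

assert result == {'name': 'Math', 'linkrefs': (), 'linkrefs_runtime': ()}
assert contract_data == {'name': 'Math'}  # the original dict is not mutated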
Example #3
def normalize_compilation_result(compilation_result):
    for key_from_compiler, raw_contract_data in compilation_result.items():
        contract_data = normalize_combined_json_contract_data(raw_contract_data)
        source_path, contract_name = normalize_combined_json_contract_key(
            key_from_compiler,
            contract_data,
        )
        yield pipe(
            contract_data,
            partial(assoc, key='source_path', value=source_path),
            partial(assoc, key='name', value=contract_name),
        )
Example #5
def normalize_compilation_result(compilation_result):
    """
    Take the result from the --standard-json compilation and flatten it into an
    iterable of contract data dictionaries.
    """
    for source_path, file_contracts in compilation_result['contracts'].items():
        for contract_name, raw_contract_data in file_contracts.items():
            contract_data = normalize_standard_json_contract_data(raw_contract_data)
            yield pipe(
                contract_data,
                partial(assoc, key='source_path', value=source_path),
                partial(assoc, key='name', value=contract_name),
            )
Example #6
def serialize_full_transaction(transaction, block, transaction_index, is_pending):
    if is_pending:
        block_number = None
        block_hash = None
        transaction_index = None
    else:
        block_number = block['number']
        block_hash = block['hash']

    return pipe(
        transaction,
        partial(assoc, key='block_number', value=block_number),
        partial(assoc, key='block_hash', value=block_hash),
        partial(assoc, key='transaction_index', value=transaction_index),
    )
Example #7
    def mine_blocks(self, num_blocks=1, coinbase=None):
        for _ in range(num_blocks):
            block_to_mine = dissoc(self.block, 'hash')
            block_hash = fake_rlp_hash(block_to_mine)
            mined_block = assoc(block_to_mine, 'hash', block_hash)
            assign_block_info = compose(
                partial(assoc, key='block_number',
                        value=mined_block['number']),
                partial(assoc, key='block_hash', value=mined_block['hash']),
            )
            mined_block['transactions'] = tuple(
                assign_block_info(transaction)
                for transaction in mined_block['transactions'])
            self.blocks.append(mined_block)
            self.block = make_block_from_parent(mined_block)
            yield block_hash
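
A standalone sketch of the compose(partial(assoc, ...), ...) pattern from mine_blocks above; the block values here are made up. Note that compose applies its functions right to left:

from functools import partial

from cytoolz.dicttoolz import assoc
from cytoolz.functoolz import compose

# The 'block_hash' assoc runs first, then the 'block_number' assoc.
assign_block_info = compose(
    partial(assoc, key='block_number', value=7),
    partial(assoc, key='block_hash', value=b'\x00' * 32),
)

transaction = {'hash': b'\x01' * 32}
assert assign_block_info(transaction) == {
    'hash': b'\x01' * 32,
    'block_number': 7,
    'block_hash': b'\x00' * 32,
}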
Example #8
def _reduce_middleware_fn(request_fn, middleware, web3, request_id):
    """
    The reduce function for wrapping the provider request in the middlewares.
    """
    return partial(
        middleware(request_fn, web3),
        request_id=request_id,
    )
Example #9
def add_full_dependencies_to_compiled_contracts(compiled_contracts):
    dependency_graph = compute_direct_dependency_graph(compiled_contracts)
    deploy_order = compute_deploy_order(dependency_graph)

    for contract_data in compiled_contracts:
        full_dependencies = compute_recursive_contract_dependencies(
            contract_data['name'],
            dependency_graph,
        )
        ordered_full_dependencies = tuple(
            contract_name for contract_name in deploy_order
            if contract_name in full_dependencies)
        yield pipe(
            contract_data,
            partial(assoc, key='full_dependencies', value=full_dependencies),
            partial(assoc,
                    key='ordered_full_dependencies',
                    value=ordered_full_dependencies),
        )
Example #10
def validate_unique(values):
    if not isdistinct(values):
        duplicates = pipe(
            values,
            frequencies,  # get the frequencies
            partial(valfilter,
                    lambda v: v > 1),  # filter to ones that occur > 1
            sorted,  # sort them
            tuple,  # cast them to an immutable form
        )
        raise ValidationError(
            "The values provided are not unique.  Duplicates: {0}".format(
                ', '.join((str(value) for value in duplicates))))
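
A worked example of the duplicate-detection pipeline above, with made-up input (assumes cytoolz is installed):

from functools import partial

from cytoolz.dicttoolz import valfilter
from cytoolz.functoolz import pipe
from cytoolz.itertoolz import frequencies, isdistinct

values = ['a', 'b', 'a', 'c', 'b']
assert not isdistinct(values)

duplicates = pipe(
    values,
    frequencies,                          # {'a': 2, 'b': 2, 'c': 1}
    partial(valfilter, lambda v: v > 1),  # {'a': 2, 'b': 2}
    sorted,                               # sorting a dict iterates its keys
    tuple,
)
assert duplicates == ('a', 'b')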
Example #11
def link_bytecode_by_name(bytecode, link_references, **link_names_and_values):
    """
    Helper function for linking bytecode with a mapping of link reference names
    to their values.

    TODO: fix this as it now needs access to the source file paths which isn't ideal
    """
    link_fn = compose(*(partial(
        insert_link_value,
        value=link_names_and_values[expand_placeholder(
            linkref['name'], link_names_and_values.keys())],
        offset=linkref['start'],
    ) for linkref in link_references))
    linked_bytecode = link_fn(bytecode)
    return linked_bytecode
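
The compose(*(partial(...) for ...)) idiom above builds one linker function out of many single-purpose ones. A sketch with a hypothetical insert_at helper standing in for insert_link_value; in the original, each value is looked up from link_names_and_values and each offset comes from linkref['start']:

from functools import partial

from cytoolz.functoolz import compose

def insert_at(text, value, offset):
    # Hypothetical stand-in: splice `value` into `text` at `offset`.
    return text[:offset] + value + text[offset + len(value):]

link_references = (
    {'start': 2, 'value': 'AA'},
    {'start': 6, 'value': 'BB'},
)

link_fn = compose(*(
    partial(insert_at, value=ref['value'], offset=ref['start'])
    for ref in link_references
))

assert link_fn('__xx__xx__') == '__AA__BB__'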
Example #12
def validate_unique(values, title="Value"):
    if not isdistinct(values):
        duplicates = pipe(
            values,
            frequencies,  # get the frequencies
            partial(valfilter, lambda v: v > 1),  # filter to ones that occur > 1
            sorted,  # sort them
            tuple,  # cast them to an immutable form
        )
        raise ValidationError(
            "{title} does not contain unique items.  Duplicates: {0}".format(
                ', '.join((str(value) for value in duplicates)),
                title=title,
            )
        )
Example #13
def validate_unique(values, title="Value"):
    if not isdistinct(values):
        duplicates = pipe(
            values,
            frequencies,  # get the frequencies
            partial(valfilter,
                    lambda v: v > 1),  # filter to ones that occur > 1
            sorted,  # sort them
            tuple,  # cast them to an immutable form
        )
        raise ValidationError(
            "{title} does not contain unique items.  Duplicates: {0}".format(
                ', '.join((str(value) for value in duplicates)),
                title=title,
            ))
Example #14
    def create_log_filter(self,
                          from_block=None,
                          to_block=None,
                          address=None,
                          topics=None):
        self.validator.validate_inbound_filter_params(
            from_block=from_block,
            to_block=to_block,
            address=address,
            topics=topics,
        )
        (
            raw_from_block,
            raw_to_block,
            raw_address,
            raw_topics,
        ) = self.normalizer.normalize_inbound_filter_params(
            from_block=from_block,
            to_block=to_block,
            address=address,
            topics=topics,
        )

        raw_filter_id = next(self._filter_counter)
        raw_filter_params = {
            'from_block': raw_from_block,
            'to_block': raw_to_block,
            'addresses': raw_address,
            'topics': raw_topics,
        }
        filter_fn = partial(check_if_log_matches, **raw_filter_params)
        new_filter = Filter(
            filter_params=raw_filter_params,
            filter_fn=filter_fn,
        )
        self._log_filters[raw_filter_id] = new_filter

        if is_integer(raw_from_block):
            if is_integer(raw_to_block):
                upper_bound = raw_to_block + 1
            else:
                upper_bound = self.get_block_by_number('pending')['number']
            for block_number in range(raw_from_block, upper_bound):
                block = self.get_block_by_number(block_number)
                self._add_log_entries_to_filter(block, new_filter)

        filter_id = self.normalizer.normalize_outbound_filter_id(raw_filter_id)
        return filter_id
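
A tiny sketch of the partial(check_if_log_matches, **raw_filter_params) pattern above: the filter parameters are bound as keyword arguments now, and only the log entry is supplied later. check_matches and its fields are hypothetical:

from functools import partial

def check_matches(log_entry, from_block, to_block):
    return from_block <= log_entry['block_number'] <= to_block

raw_filter_params = {'from_block': 1, 'to_block': 5}
filter_fn = partial(check_matches, **raw_filter_params)

assert filter_fn({'block_number': 3}) is True
assert filter_fn({'block_number': 9}) is False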
Example #15
def link_bytecode_by_name(bytecode, link_references, **link_names_and_values):
    """
    Helper function for linking bytecode with a mapping of link reference names
    to their values.

    TODO: fix this as it now needs access to the source file paths which isn't ideal
    """
    link_fn = compose(*(
        partial(
            insert_link_value,
            value=link_names_and_values[
                expand_placeholder(linkref['name'], link_names_and_values.keys())
            ],
            offset=linkref['start'],
        )
        for linkref
        in link_references
    ))
    linked_bytecode = link_fn(bytecode)
    return linked_bytecode
Example #16
    def get_logs(self, from_block=None, to_block=None, address=None, topics=None):
        self.validator.validate_inbound_filter_params(
            from_block=from_block,
            to_block=to_block,
            address=address,
            topics=topics,
        )
        (
            raw_from_block,
            raw_to_block,
            raw_address,
            raw_topics,
        ) = self.normalizer.normalize_inbound_filter_params(
            from_block=from_block,
            to_block=to_block,
            address=address,
            topics=topics,
        )

        # Setup the filter object
        raw_filter_params = {
            'from_block': raw_from_block,
            'to_block': raw_to_block,
            'addresses': raw_address,
            'topics': raw_topics,
        }
        filter_fn = partial(
            check_if_log_matches,
            **raw_filter_params,
        )
        log_filter = Filter(
            filter_params=raw_filter_params,
            filter_fn=filter_fn,
        )

        # Set from/to block defaults
        if raw_from_block is None:
            raw_from_block = 'latest'
        if raw_to_block is None:
            raw_to_block = 'latest'

        # Determine lower bound for block range.
        if isinstance(raw_from_block, int):
            lower_bound = raw_from_block
        else:
            lower_bound = self.get_block_by_number(raw_from_block)['number']

        # Determine upper bound for block range.
        if isinstance(raw_to_block, int):
            upper_bound = raw_to_block
        else:
            upper_bound = self.get_block_by_number(raw_to_block)['number']

        # Enumerate the blocks in the block range to find all log entries which match.
        for block_number in range(lower_bound, upper_bound + 1):
            block = self.get_block_by_number(block_number)
            for transaction_hash in block['transactions']:
                receipt = self.get_transaction_receipt(transaction_hash)
                for log_entry in receipt['logs']:
                    raw_log_entry = self.normalizer.normalize_inbound_log_entry(log_entry)
                    log_filter.add(raw_log_entry)

        # Return the matching log entries
        for item in log_filter.get_all():
            yield self.normalizer.normalize_outbound_log_entry(item)
Example #17
from __future__ import unicode_literals

from cytoolz.functoolz import (
    compose,
    partial,
)

from eth_utils import (
    int_to_big_endian,
    pad_left,
)

zpad = partial(pad_left, pad_with=b'\x00')
zpad32 = partial(pad_left, to_size=32, pad_with=b'\x00')

int_to_32byte_big_endian = compose(
    zpad32,
    int_to_big_endian,
)
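
A self-contained sketch of what the zpad32/int_to_32byte_big_endian partials do, using simplified stand-ins for eth_utils.int_to_big_endian and pad_left (their exact behavior here is an assumption):

from functools import partial

from cytoolz.functoolz import compose

def int_to_big_endian(value):
    # Simplified stand-in: big-endian bytes without leading zeros.
    return value.to_bytes((value.bit_length() + 7) // 8 or 1, 'big')

def pad_left(value, to_size, pad_with):
    # Simplified stand-in: left-pad `value` to `to_size` bytes.
    return value.rjust(to_size, pad_with)

zpad32 = partial(pad_left, to_size=32, pad_with=b'\x00')
int_to_32byte_big_endian = compose(zpad32, int_to_big_endian)

assert int_to_32byte_big_endian(1) == b'\x00' * 31 + b'\x01'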
Example #18
transaction_params_remapper = apply_key_map(TRANSACTION_PARAMS_MAPPING)


TRANSACTION_PARAMS_FORMATTERS = {
    'gas': to_integer_if_hex,
    'gasPrice': to_integer_if_hex,
    'value': to_integer_if_hex,
}


transaction_params_formatter = apply_formatters_to_dict(TRANSACTION_PARAMS_FORMATTERS)


TRANSACTION_FORMATTERS = {
    'to': apply_formatter_if(partial(operator.eq, b''), static_return(None)),
}


transaction_formatter = apply_formatters_to_dict(TRANSACTION_FORMATTERS)


RECEIPT_FORMATTERS = {
    'logs': apply_formatter_to_array(log_key_remapper),
}


receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS)


ethereum_tester_middleware = construct_formatting_middleware(
Example #19
TRANSACTION_NORMALIZERS = {
    'from': to_canonical_address,
    'to': to_empty_or_canonical_address,
    'gas': identity,
    'gas_price': identity,
    'nonce': identity,
    'value': identity,
    'data': decode_hex,
    'r': identity,
    's': identity,
    'v': identity,
}


normalize_transaction = partial(normalize_dict, normalizers=TRANSACTION_NORMALIZERS)


LOG_ENTRY_NORMALIZERS = {
    'type': identity,
    'log_index': identity,
    'transaction_index': identity,
    'transaction_hash': decode_hex,
    'block_hash': partial(normalize_if, conditional_fn=is_string, normalizer=decode_hex),
    'block_number': identity,
    'address': to_canonical_address,
    'data': decode_hex,
    'topics': partial(normalize_array, normalizer=decode_hex),
}
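
normalize_dict itself is not shown in this excerpt; a hypothetical sketch, assuming it applies each key's normalizer to the corresponding value, shows why partial(normalize_dict, normalizers=...) yields a ready-to-use one-argument normalizer:

from functools import partial

def normalize_dict(value, normalizers):
    # Assumed behavior: run each value through the normalizer registered
    # for its key.
    return {key: normalizers[key](item) for key, item in value.items()}

def identity(value):
    return value

NORMALIZERS = {
    'nonce': identity,
    'data': bytes.fromhex,
}

normalize = partial(normalize_dict, normalizers=NORMALIZERS)
assert normalize({'nonce': 0, 'data': 'deadbeef'}) == {
    'nonce': 0,
    'data': b'\xde\xad\xbe\xef',
}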

Example #20
    is_array_of_strings,
)

from .formatting import (
    construct_formatting_middleware, )


def bytes_to_ascii(value):
    return codecs.decode(value, 'ascii')


to_ascii_if_bytes = apply_formatter_if(is_bytes, bytes_to_ascii)
to_integer_if_hex = apply_formatter_if(is_string, hex_to_integer)
block_number_formatter = apply_formatter_if(is_integer, integer_to_hex)

is_false = partial(operator.is_, False)

is_not_false = complement(is_false)
is_not_null = complement(is_null)


@curry
def to_hexbytes(num_bytes, val, variable_length=False):
    if isinstance(val, (str, int, bytes)):
        result = HexBytes(val)
    else:
        raise TypeError("Cannot convert %r to HexBytes" % val)

    extra_bytes = len(result) - num_bytes
    if extra_bytes == 0 or (variable_length and extra_bytes < 0):
        return result
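
The @curry decorator above comes from cytoolz: calling the function with fewer positional arguments than it needs returns a partially-applied callable instead of raising. A minimal sketch with a made-up pad_to helper:

from cytoolz.functoolz import curry

@curry
def pad_to(num_bytes, val):
    return val.rjust(num_bytes, b'\x00')

pad_to_4 = pad_to(4)          # curried: still waiting for `val`
assert pad_to_4(b'\x01') == b'\x00\x00\x00\x01'
assert pad_to(4, b'\x01') == b'\x00\x00\x00\x01'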
Example #21
import pytest

from cytoolz.functoolz import (
    partial, )

from web3.utils.blocks import (
    select_method_for_block_identifier, )

selector_fn = partial(
    select_method_for_block_identifier,
    if_hash='test_hash',
    if_number='test_number',
    if_predefined='test_predefined',
)


@pytest.mark.parametrize(
    'input,expected',
    (
        ('latest', 'test_predefined'),
        ('pending', 'test_predefined'),
        ('earliest', 'test_predefined'),
        (-1, ValueError),
        (0, 'test_number'),
        (1, 'test_number'),
        (4000000, 'test_number'),
        ('0x0', 'test_number'),
        ('0x00', 'test_number'),
        ('0x1', 'test_number'),
        ('0x01', 'test_number'),
        (hex(4000000), 'test_number'),
Example #22
transaction_params_remapper = apply_key_map(TRANSACTION_PARAMS_MAPPING)


TRANSACTION_PARAMS_FORMATTERS = {
    'gas': to_integer_if_hex,
    'gasPrice': to_integer_if_hex,
    'value': to_integer_if_hex,
}


transaction_params_formatter = apply_formatters_to_dict(TRANSACTION_PARAMS_FORMATTERS)


TRANSACTION_FORMATTERS = {
    'to': apply_formatter_if(static_return(None), partial(operator.eq, b'')),
}


transaction_formatter = apply_formatters_to_dict(TRANSACTION_FORMATTERS)


ethereum_tester_middleware = construct_formatting_middleware(
    request_formatters={
        # Eth
        'eth_getBlockByNumber': apply_formatters_to_args(
            apply_formatter_if(to_integer_if_hex, is_not_named_block),
        ),
        'eth_getFilterChanges': apply_formatters_to_args(hex_to_integer),
        'eth_getFilterLogs': apply_formatters_to_args(hex_to_integer),
        'eth_getBlockTransactionCountByNumber': apply_formatters_to_args(
Example #23
from .formatting import (
    construct_formatting_middleware,
)


def bytes_to_ascii(value):
    return codecs.decode(value, 'ascii')


to_ascii_if_bytes = apply_formatter_if(is_bytes, bytes_to_ascii)
to_integer_if_hex = apply_formatter_if(is_string, hex_to_integer)
block_number_formatter = apply_formatter_if(is_integer, integer_to_hex)


is_false = partial(operator.is_, False)

is_not_false = complement(is_false)
is_not_null = complement(is_null)


def is_array_of_strings(value):
    if not is_list_like(value):
        return False
    return all((is_string(item) for item in value))


def is_array_of_dicts(value):
    if not is_list_like(value):
        return False
    return all((is_dict(item) for item in value))
Example #24
import pytest

from cytoolz.functoolz import (
    partial,
)

from web3.utils.blocks import (
    select_method_for_block_identifier,
)


selector_fn = partial(
    select_method_for_block_identifier,
    if_hash='test_hash',
    if_number='test_number',
    if_predefined='test_predefined',
)


@pytest.mark.parametrize(
    'input,expected',
    (
        ('latest', 'test_predefined'),
        ('pending', 'test_predefined'),
        ('earliest', 'test_predefined'),
        (-1, ValueError),
        (0, 'test_number'),
        (1, 'test_number'),
        (4000000, 'test_number'),
        ('0x0', 'test_number'),
        ('0x00', 'test_number'),
Example #25
    encode_hex,
    is_address,
    is_bytes,
    is_canonical_address,
    is_dict,
)

from .common import (
    normalize_if,
    normalize_dict,
    normalize_array,
)


normalize_account = to_checksum_address
normalize_account_list = partial(normalize_array, normalizer=normalize_account)

to_empty_or_checksum_address = apply_one_of_formatters((
    (lambda addr: addr == b'', lambda addr: ''),
    (is_canonical_address, to_checksum_address),
))

TRANSACTION_NORMALIZERS = {
    "hash": encode_hex,
    "nonce": identity,
    "block_hash": partial(normalize_if, conditional_fn=is_bytes, normalizer=encode_hex),
    "block_number": identity,
    "transaction_index": identity,
    "from": to_checksum_address,
    "to": to_empty_or_checksum_address,
    "value": identity,