Example #1
def test_abi_decode_single_bytes():
    typ = ['bytes', '8', []]
    assert (b'\x01\x02' + b'\x00' * 6) == abi.decode_single(
        typ, abi.encode_single(typ, b'\x01\x02'))

    typ = ['bytes', '', []]
    assert b'\x01\x02' == abi.decode_single(
        typ, abi.encode_single(typ, b'\x01\x02'))
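# note: a fixed-size 'bytes8' value decodes right-padded with zero bytes, while
# the dynamic 'bytes' type round-trips the input unchanged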
Example #2
def test_encode_decode_bytes():
    bytes8 = ('bytes', '8', [])
    dynamic_bytes = ('bytes', '', [])

    assert decode_single(bytes8, encode_single(
        bytes8, b'\x01\x02')) == (b'\x01\x02' + b'\x00' * 6)
    assert decode_single(dynamic_bytes,
                         encode_single(dynamic_bytes,
                                       b'\x01\x02')) == b'\x01\x02'
Example #3
def test_abi_decode_single_real():
    real_data = abi.encode_single(['real', '128x128', []], 1)
    assert abi.decode_single(['real', '128x128', []], real_data) == 1

    real_data = abi.encode_single(['real', '128x128', []], 2**127-1)
    assert abi.decode_single(['real', '128x128', []], real_data) == (2**127-1)*1.0

    real_data = abi.encode_single(['real', '128x128', []], -1)
    assert abi.decode_single(['real', '128x128', []], real_data) == -1

    real_data = abi.encode_single(['real', '128x128', []], -2**127)
    assert abi.decode_single(['real', '128x128', []], real_data) == -2**127
Example #4
def test_abi_decode_single_real():
    real_data = abi.encode_single(['real', '128x128', []], 1)
    assert abi.decode_single(['real', '128x128', []], real_data) == 1

    real_data = abi.encode_single(['real', '128x128', []], 2**127 - 1)
    assert abi.decode_single(['real', '128x128', []],
                             real_data) == (2**127 - 1) * 1.0

    real_data = abi.encode_single(['real', '128x128', []], -1)
    assert abi.decode_single(['real', '128x128', []], real_data) == -1

    real_data = abi.encode_single(['real', '128x128', []], -2**127)
    assert abi.decode_single(['real', '128x128', []], real_data) == -2**127
Example #5
def test_abi_decode_single_fixed():
    fixed_data = abi.encode_single(['fixed', '128x128', []], 1)
    assert abi.decode_single(['fixed', '128x128', []], fixed_data) == 1

    fixed_data = abi.encode_single(['fixed', '128x128', []], 2**127 - 1)
    assert abi.decode_single(['fixed', '128x128', []],
                             fixed_data) == (2**127 - 1) * 1.0

    fixed_data = abi.encode_single(['fixed', '128x128', []], -1)
    assert abi.decode_single(['fixed', '128x128', []], fixed_data) == -1

    fixed_data = abi.encode_single(['fixed', '128x128', []], -2**127)
    assert abi.decode_single(['fixed', '128x128', []], fixed_data) == -2**127
Example #6
def test_encode_decode_fixed():
    fixed128x128 = ('fixed', '128x128', [])

    fixed_data = encode_single(fixed128x128, 1)
    assert decode_single(fixed128x128, fixed_data) == 1

    fixed_data = encode_single(fixed128x128, 2**127 - 1)
    assert decode_single(fixed128x128, fixed_data) == (2**127 - 1) * 1.0

    fixed_data = encode_single(fixed128x128, -1)
    assert decode_single(fixed128x128, fixed_data) == -1

    fixed_data = encode_single(fixed128x128, -2**127)
    assert decode_single(fixed128x128, fixed_data) == -2**127
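# note: the boundary values 2**127 - 1 and -2**127 used above exercise the
# extremes of the signed 128-bit integer part of the fixed128x128 type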
Example #7
def test_abi_decode_single_hash():
    typ = ['hash', '8', []]
    assert b'\x01' * 8 == abi.decode_single(typ, abi.encode_single(typ, b'\x01' * 8))
Example #8
    async def process_block_for_contract(self, collectible_address):
        if collectible_address in self._processing:
            log.warning("Already processing {}".format(collectible_address))
            return

        self._processing[collectible_address] = True

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            collectible = await con.fetchrow(
                "SELECT * FROM collectibles WHERE contract_address = $1",
                collectible_address)
            if collectible is None:
                log.error(
                    "Unable to find collectible with contract_address {}".
                    format(collectible_address))
                del self._processing[collectible_address]
                return

            if collectible['type'] == 1:
                events = await con.fetch(
                    "SELECT * FROM collectible_transfer_events "
                    "WHERE collectible_address = $1", collectible_address)
            elif collectible['type'] == 721:
                # use default erc721 event
                # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
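                # topic_hash below is keccak256("Transfer(address,address,uint256)")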
                events = [{
                    'collectible_address': collectible_address,
                    'contract_address': collectible_address,
                    'name': 'Transfer',
                    'topic_hash':
                    '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                    'arguments': ['address', 'address', 'uint256'],
                    'indexed_arguments': [True, True, False],
                    'to_address_offset': 1,
                    'token_id_offset': 2
                }]
            else:
                log.error("Collectible with unknown type {}".format(
                    collectible_address))
                del self._processing[collectible_address]
                return

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            del self._processing[collectible_address]
            log.info(
                "Aborting {} because latest block number < collectible's next block"
                .format(collectible_address))
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        updates = {}

        for event in events:
            contract_address = event['contract_address']

            while True:
                try:
                    logs = await self.eth.eth_getLogs(
                        fromBlock=from_block_number,
                        toBlock=to_block_number,
                        topics=[[event['topic_hash']]],
                        address=contract_address)
                    break
                except:
                    log.exception("error getting logs for block")
                    continue

            if len(logs):

                for _log in logs:
                    indexed_data = _log['topics'][1:]
                    data_types = [
                        t for t, i in zip(event['arguments'],
                                          event['indexed_arguments'])
                        if i is False
                    ]
                    try:
                        data = decode_abi(data_types,
                                          data_decoder(_log['data']))
                    except:
                        log.exception("Error decoding log data: {} {}".format(
                            data_types, _log['data']))
                        del self._processing[collectible_address]
                        return
                    arguments = []
                    try:
                        for t, i in zip(event['arguments'],
                                        event['indexed_arguments']):
                            if i is True:
                                arguments.append(
                                    decode_single(
                                        process_type(t),
                                        data_decoder(indexed_data.pop(0))))
                            else:
                                arguments.append(data.pop(0))
                    except:
                        log.exception("Error compiling event data")
                        log.info("EVENT: {}".format(event))
                        log.info("LOG: {}".format(_log))
                        del self._processing[collectible_address]
                        return

                    to_address = arguments[event['to_address_offset']]
                    token_id = parse_int(arguments[event['token_id_offset']])

                    log.debug("{} #{} -> {} -> {}".format(
                        collectible['name'], token_id, event['name'],
                        to_address))
                    updates[hex(token_id)] = (collectible_address,
                                              hex(token_id), to_address)

        if len(updates) > 0:
            new_tokens = []
            for token_id in list(updates.keys()):
                async with self.pool.acquire() as con:
                    token = await con.fetchrow(
                        "SELECT * FROM collectible_tokens WHERE contract_address = $1 AND token_id = $2",
                        collectible_address, token_id)
                if token is None:
                    # get token details
                    token_uri = None
                    token_uri_data = await self.eth.eth_call(
                        to_address=collectible_address,
                        data="{}{:064x}".format(TOKEN_URI_CALL_DATA,
                                                int(token_id, 16)))
                    if token_uri_data and token_uri_data != "0x":
                        try:
                            token_uri = decode_abi(
                                ['string'],
                                data_decoder(token_uri_data))[0].decode(
                                    'utf-8', errors='replace')
                        except:
                            log.exception("Error decoding tokenURI data")

                    token_image = None
                    token_name = None
                    token_description = None
                    # if token_uri points to a valid url check if it points to json (for the erc721 metadata)
                    parsed_uri = urlparse(token_uri)
                    if token_uri and parsed_uri.netloc and parsed_uri.scheme in [
                            'http', 'https'
                    ]:
                        try:
                            resp = await AsyncHTTPClient(
                                max_clients=100).fetch(parsed_uri.geturl())
                            metadata = json_decode(resp.body)
                            if "properties" in metadata:
                                metadata = metadata['properties']
                            if 'name' in metadata:
                                if type(
                                        metadata['name']
                                ) == dict and 'description' in metadata['name']:
                                    token_name = metadata['name'][
                                        'description']
                                elif type(metadata['name']) == str:
                                    token_name = metadata['name']
                            if 'description' in metadata:
                                if type(
                                        metadata['description']
                                ) == dict and 'description' in metadata[
                                        'description']:
                                    token_description = metadata[
                                        'description']['description']
                                elif type(metadata['description']) == str:
                                    token_description = metadata['description']
                            if 'image' in metadata:
                                if type(
                                        metadata['image']
                                ) == dict and 'description' in metadata[
                                        'image']:
                                    token_image = metadata['image'][
                                        'description']
                                elif type(metadata['image']) == str:
                                    token_image = metadata['image']
                        except:
                            log.exception(
                                "Error getting token metadata for {}:{} from {}"
                                .format(collectible_address, token_id,
                                        token_uri))
                            pass

                    if not token_image:
                        if collectible['image_url_format_string'] is not None:
                            image_format_string = collectible[
                                'image_url_format_string']
                        else:
                            image_format_string = config['collectibles'][
                                'image_format']
                        token_image = image_format_string.format(
                            contract_address=collectible_address,
                            token_id_hex=token_id,
                            token_id_int=int(token_id, 16),
                            token_uri=token_uri)

                    new_token = updates.pop(token_id, ()) + (
                        token_uri, token_name, token_description, token_image)
                    new_tokens.append(new_token)

            async with self.pool.acquire() as con:
                if len(new_tokens) > 0:
                    await con.executemany(
                        "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, token_uri, name, description, image) "
                        "VALUES ($1, $2, $3, $4, $5, $6, $7)", new_tokens)

                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address) "
                    "VALUES ($1, $2, $3) "
                    "ON CONFLICT (contract_address, token_id) DO UPDATE "
                    "SET owner_address = EXCLUDED.owner_address",
                    list(updates.values()))

        ready = collectible['ready'] or to_block_number == latest_block_number

        self.last_block = to_block_number
        async with self.pool.acquire() as con:
            await con.execute(
                "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                to_block_number, ready, collectible_address)

        del self._processing[collectible_address]
        if to_block_number < latest_block_number:
            asyncio.get_event_loop().create_task(
                self.process_block_for_contract(collectible_address))
Example #9
def test_encode_decode_hash():
    hash8 = ('hash', '8', [])

    hash1 = b'\x01' * 8
    assert hash1 == decode_single(hash8, encode_single(hash8, hash1))
Example #10
    async def process_block_for_asset_creation_contract(self, collectible_address):

        if collectible_address in self._processing and not self._processing[collectible_address].done():
            log.debug("Already processing {}".format(collectible_address))
            self._queue.add(collectible_address)
            return

        self._processing[collectible_address] = asyncio.Task.current_task()

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            collectible = await con.fetchrow("SELECT * FROM collectibles WHERE contract_address = $1",
                                             collectible_address)

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            del self._processing[collectible_address]
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        topics = [[ASSET_CREATED_TOPIC]]

        log.debug("Getting logs for {} from blocks {}->{}".format(collectible_address, from_block_number, to_block_number))
        req_start = time.time()
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=topics,
                    address=collectible['contract_address'])
                if time.time() - req_start > 10:
                    log.warning("eth_getLogs(fromBlock={}, toBlock={}, topics={}, address={}) took {} seconds to complete".format(
                        from_block_number, to_block_number, topics, collectible['contract_address'], time.time() - req_start))
                break
            except JsonRPCError as e:
                if e.message != "Unknown block number":
                    log.exception("unexpected error getting logs for fungible creation contract: {} (after {} seconds)".format(collectible_address, time.time() - req_start))
                await asyncio.sleep(random.random())
                continue
            except:
                log.exception("unexpected error getting logs for fungible creation contract: {} (after {} seconds)".format(collectible_address, time.time() - req_start))
                await asyncio.sleep(random.random())
                continue

        if len(logs):

            log.debug("Found {} logs for {} in blocks {}->{}".format(len(logs), collectible_address, from_block_number, to_block_number))

            for i, _log in enumerate(logs):
                log_block_number = int(_log['blockNumber'], 16)
                if log_block_number < from_block_number or log_block_number > to_block_number:
                    log.error("go unexpected block number in logs: {} (fromBlock={}, toBlock={}, collectible_address={})".format(
                        log_block_number, from_block_number, to_block_number, collectible['contract_address']))
                    del self._processing[collectible_address]
                    return

                topic = _log['topics'][0]

                if topic != ASSET_CREATED_TOPIC:
                    continue

                asset_contract_address = decode_single(
                    process_type('address'), data_decoder(_log['topics'][1]))

                try:
                    token_uri_data = await self.eth.eth_call(to_address=asset_contract_address, data=TOKEN_URI_CALL_DATA)
                except:
                    log.exception("Error getting token uri for fungible collectible asset {}".format(asset_contract_address))
                    continue
                asset_token_uri = decode_abi(['string'], data_decoder(token_uri_data))
                try:
                    asset_token_uri = asset_token_uri[0].decode('utf-8', errors='replace')
                except:
                    log.exception("Invalid tokenURI for fungible collectible asset {}".format(asset_contract_address))
                    continue
                try:
                    name_data = await self.eth.eth_call(to_address=asset_contract_address, data=NAME_CALL_DATA)
                except:
                    log.exception("Error getting name for fungible collectible asset {}".format(asset_contract_address))
                    continue
                asset_name = decode_abi(['string'], data_decoder(name_data))
                try:
                    asset_name = asset_name[0].decode('utf-8', errors='replace')
                except:
                    log.exception("Invalid name for fungible collectible asset {}".format(asset_contract_address))
                    continue

                try:
                    creator_data = await self.eth.eth_call(to_address=asset_contract_address, data=CREATOR_CALL_DATA)
                except:
                    log.exception("Error getting creator for fungible collectible asset {}".format(asset_contract_address))
                    continue
                asset_creator = decode_abi(['address'], data_decoder(creator_data))[0]
                try:
                    total_supply_data = await self.eth.eth_call(to_address=asset_contract_address, data=TOTAL_SUPPLY_CALL_DATA)
                except:
                    log.exception("Error getting total supply for fungible collectible asset {}".format(asset_contract_address))
                    continue
                total_supply = decode_abi(['uint256'], data_decoder(total_supply_data))[0]

                # owner is currently always the address that triggered the AssetCreate event
                tx = await self.eth.eth_getTransactionByHash(_log['transactionHash'])
                asset_owner = tx['from']

                asset_image = None
                asset_description = None
                parsed_uri = urlparse(asset_token_uri)
                if asset_token_uri and parsed_uri.netloc and parsed_uri.scheme in ['http', 'https']:
                    try:
                        resp = await AsyncHTTPClient(max_clients=100).fetch(parsed_uri.geturl())
                        metadata = json_decode(resp.body)
                        if "properties" in metadata:
                            metadata = metadata['properties']
                        if 'name' in metadata:
                            if type(metadata['name']) == dict and 'description' in metadata['name']:
                                asset_name = metadata['name']['description']
                            elif type(metadata['name']) == str:
                                asset_name = metadata['name']
                        if 'description' in metadata:
                            if type(metadata['description']) == dict and 'description' in metadata['description']:
                                asset_description = metadata['description']['description']
                            elif type(metadata['description']) == str:
                                asset_description = metadata['description']
                        if 'image' in metadata:
                            if type(metadata['image']) == dict and 'description' in metadata['image']:
                                asset_image = metadata['image']['description']
                            elif type(metadata['image']) == str:
                                asset_image = metadata['image']
                    except:
                        log.exception("Error getting token metadata for {}:{} from {}".format(
                            collectible_address, asset_contract_address, asset_token_uri))
                        pass

                if asset_image is None:
                    if collectible['image_url_format_string'] is not None:
                        asset_image = collectible['image_url_format_string'].format(
                            contract_address=asset_contract_address,
                            collectible_address=collectible_address,
                            name=asset_name,
                            token_uri=asset_token_uri,
                            creator_address=asset_creator)

                async with self.pool.acquire() as con:
                    await con.execute(
                        "INSERT INTO fungible_collectibles (contract_address, collectible_address, name, description, token_uri, creator_address, last_block, image) "
                        "VALUES ($1, $2, $3, $4, $5, $6, $7, $8) "
                        "ON CONFLICT (contract_address) DO NOTHING",
                        asset_contract_address, collectible_address, asset_name, asset_description, asset_token_uri, asset_creator, log_block_number, asset_image)
                    await con.execute(
                        "INSERT INTO fungible_collectible_balances (contract_address, owner_address, balance) "
                        "VALUES ($1, $2, $3)",
                        asset_contract_address, asset_owner, hex(total_supply))
                asyncio.get_event_loop().create_task(self.process_block_for_asset_contract(asset_contract_address))

        else:
            log.debug("No logs found for {} in blocks {}->{}".format(collectible_address, from_block_number, to_block_number))

        ready = collectible['ready'] or to_block_number == latest_block_number

        async with self.pool.acquire() as con:
            await con.execute("UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                              to_block_number, ready, collectible_address)

        del self._processing[collectible_address]
        if to_block_number < latest_block_number or collectible_address in self._queue:
            self._queue.discard(collectible_address)
            asyncio.get_event_loop().create_task(self.process_block_for_asset_creation_contract(collectible_address))
Example #11
def test_abi_decode_single_hash():
    typ = ['hash', '8', []]
    assert b'\x01'*8 == abi.decode_single(typ, abi.encode_single(typ, b'\x01'*8))
Example #12
    async def process_block(self, blocknumber=None):
        if self._processing is True:
            return
        self._processing = True
        self.__call += 1

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            if latest_block_number is None:
                log.warning("no blocks processed by block monitor yet")
                self._processing = False
                return

            collectible = await con.fetchrow("SELECT * FROM collectibles WHERE contract_address = $1",
                                             CRYPTO_PUNKS_CONTRACT_ADDRESS)
        if collectible is None:
            self._processing = False
            return

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            self._processing = False
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        topics = [[TRANSFER_TOPIC, PUNK_BOUGHT_TOPIC, PUNK_TRANSFER_TOPIC]]
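        # a nested list in the topics filter matches any of the given event
        # signatures in topic position 0, so one eth_getLogs request covers
        # plain transfers, punk purchases and punk transfers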

        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=topics,
                    address=CRYPTO_PUNKS_CONTRACT_ADDRESS)
                break
            except:
                continue

        if len(logs):

            transactions = {}
            updates = []

            for i, _log in enumerate(logs):
                tx = transactions.setdefault(_log['transactionHash'], {'function': 'unknown', 'done': False})
                log_block_number = int(_log['blockNumber'], 16)
                assert log_block_number >= from_block_number and log_block_number <= to_block_number
                if tx['done'] is True:
                    log.error("tried to reprocess transaction that was already added")
                    continue

                topic = _log['topics'][0]

                if topic == TRANSFER_TOPIC:
                    tx['to_address'] = decode_single(process_type('address'),
                                                     data_decoder(_log['topics'][2]))
                elif topic == PUNK_TRANSFER_TOPIC:
                    tx['token_id'] = decode_abi(['uint256'], data_decoder(_log['data']))[0]
                    tx['function'] = 'transferPunk'
                elif topic == PUNK_BOUGHT_TOPIC:
                    tx['token_id'] = parse_int(decode_single(process_type('address'),
                                                             data_decoder(_log['topics'][1])))
                    to_address = decode_single(process_type('address'),
                                               data_decoder(_log['topics'][3]))
                    if to_address == "0x0000000000000000000000000000000000000000":
                        tx['function'] = 'acceptBidForPunk'
                    else:
                        tx['function'] = 'buyPunk'
                else:
                    log.warning("got unknown topic: {}".format(topic))
                    continue

                if 'to_address' in tx and 'token_id' in tx:

                    tx['done'] = True
                    log.info("CryptoPunk #{} -> {} -> {}".format(
                        tx['token_id'], tx['function'], tx['to_address']))
                    token_image = config['collectibles']['image_format'].format(
                        contract_address=CRYPTO_PUNKS_CONTRACT_ADDRESS,
                        token_id=tx['token_id'])
                    updates.append((CRYPTO_PUNKS_CONTRACT_ADDRESS, hex(tx['token_id']), tx['to_address'], token_image))

            async with self.pool.acquire() as con:
                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, image) "
                    "VALUES ($1, $2, $3, $4) "
                    "ON CONFLICT (contract_address, token_id) DO UPDATE "
                    "SET owner_address = EXCLUDED.owner_address",
                    updates)

        ready = collectible['ready'] or to_block_number == latest_block_number

        async with self.pool.acquire() as con:
            await con.execute("UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                              to_block_number, ready, CRYPTO_PUNKS_CONTRACT_ADDRESS)

        self._processing = False
        if to_block_number < latest_block_number:
            asyncio.get_event_loop().create_task(self.process_block())
Example #13
    async def process_transaction(self, transaction):

        to_address = transaction['to']
        # make sure we use a valid encoding of "empty" for contract deployments
        if to_address is None:
            to_address = "0x"
        from_address = transaction['from']

        async with self.pool.acquire() as con:
            # find if we have a record of this tx by checking the from address and nonce
            db_txs = await con.fetch("SELECT * FROM transactions WHERE "
                                     "from_address = $1 AND nonce = $2",
                                     from_address, parse_int(transaction['nonce']))
            if len(db_txs) > 1:
                # see if one has the same hash
                db_tx = await con.fetchrow("SELECT * FROM transactions WHERE "
                                           "from_address = $1 AND nonce = $2 AND hash = $3 AND (status != $4 OR status IS NULL)",
                                           from_address, parse_int(transaction['nonce']), transaction['hash'], 'error')
                if db_tx is None:
                    # find if there are any that aren't marked as error
                    no_error = await con.fetch("SELECT * FROM transactions WHERE "
                                               "from_address = $1 AND nonce = $2 AND hash != $3 AND (status != $4 OR status IS NULL)",
                                               from_address, parse_int(transaction['nonce']), transaction['hash'], 'error')
                    if len(no_error) == 1:
                        db_tx = no_error[0]
                    elif len(no_error) != 0:
                        log.warning("Multiple transactions from '{}' exist with nonce '{}' in unknown state")

            elif len(db_txs) == 1:
                db_tx = db_txs[0]
            else:
                db_tx = None

            # if we have a previous transaction, do some checking to see what's going on
            # see if this is an overwritten transaction
            # if the status of the old tx was previously an error, we don't care about it
            # otherwise, we have to notify the interested parties of the overwrite

            if db_tx and db_tx['hash'] != transaction['hash'] and db_tx['status'] != 'error':

                if db_tx['v'] is not None:
                    log.warning("found overwritten transaction!")
                    log.warning("tx from: {}".format(from_address))
                    log.warning("nonce: {}".format(parse_int(transaction['nonce'])))
                    log.warning("old tx hash: {}".format(db_tx['hash']))
                    log.warning("new tx hash: {}".format(transaction['hash']))

                manager_dispatcher.update_transaction(db_tx['transaction_id'], 'error')
                db_tx = None

            # check for erc20 transfers
            erc20_transfers = []
            if transaction['blockNumber'] is not None and \
               'logs' in transaction and \
               len(transaction['logs']) > 0:

                # find any logs with erc20 token related topics
                for _log in transaction['logs']:
                    if len(_log['topics']) > 0:
                        # Transfer(address,address,uint256)
                        if _log['topics'][0] == TRANSFER_TOPIC:
                            # make sure the log address is for one we're interested in
                            is_known_token = await con.fetchval("SELECT 1 FROM tokens WHERE contract_address = $1", _log['address'])
                            if not is_known_token:
                                continue
                            if len(_log['topics']) < 3 or len(_log['data']) != 66:
                                log.warning('Got invalid erc20 Transfer event in tx: {}'.format(transaction['hash']))
                                continue
                            erc20_from_address = decode_single(('address', '', []), data_decoder(_log['topics'][1]))
                            erc20_to_address = decode_single(('address', '', []), data_decoder(_log['topics'][2]))
                            erc20_is_interesting = await con.fetchval(
                                "SELECT 1 FROM token_registrations "
                                "WHERE eth_address = $1 OR eth_address = $2",
                                erc20_from_address, erc20_to_address)
                            if erc20_is_interesting:
                                erc20_value = decode_abi(['uint256'], data_decoder(_log['data']))[0]

                                erc20_transfers.append((_log['address'], int(_log['transactionLogIndex'], 16), erc20_from_address, erc20_to_address, hex(erc20_value), 'confirmed'))

                        # special checks for WETH, since it's rarely 'Transfer'ed, but we
                        # still need to update it
                        elif (_log['topics'][0] == DEPOSIT_TOPIC or _log['topics'][0] == WITHDRAWAL_TOPIC) and _log['address'] == WETH_CONTRACT_ADDRESS:
                            eth_address = decode_single(('address', '', []), data_decoder(_log['topics'][1]))
                            erc20_is_interesting = await con.fetchval(
                                "SELECT 1 FROM token_registrations "
                                "WHERE eth_address = $1",
                                eth_address)
                            if erc20_is_interesting:
                                erc20_value = decode_abi(['uint256'], data_decoder(_log['data']))[0]
                                if _log['topics'][0] == DEPOSIT_TOPIC:
                                    erc20_to_address = eth_address
                                    erc20_from_address = "0x0000000000000000000000000000000000000000"
                                else:
                                    erc20_to_address = "0x0000000000000000000000000000000000000000"
                                    erc20_from_address = eth_address
                                erc20_transfers.append((WETH_CONTRACT_ADDRESS, int(_log['transactionLogIndex'], 16), erc20_from_address, erc20_to_address, hex(erc20_value), 'confirmed'))

            elif transaction['blockNumber'] is None and db_tx is None:
                # transaction is pending, attempt to guess if this is a token
                # transaction based off its input
                if transaction['input']:
                    data = transaction['input']
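                    # 4-byte selectors: 0xa9059cbb == transfer(address,uint256),
                    # 0x23b872dd == transferFrom(address,address,uint256); each ABI
                    # argument below is 64 hex chars, with the address in its last 40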
                    if (data.startswith("0xa9059cbb") and len(data) == 138) or (data.startswith("0x23b872dd") and len(data) == 202):
                        token_value = hex(int(data[-64:], 16))
                        if data.startswith("0x23b872dd"):
                            erc20_from_address = "0x" + data[34:74]
                            erc20_to_address = "0x" + data[98:138]
                        else:
                            erc20_from_address = from_address
                            erc20_to_address = "0x" + data[34:74]
                        erc20_transfers.append((to_address, 0, erc20_from_address, erc20_to_address, token_value, 'unconfirmed'))
                    # special WETH handling
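                    # 0xd0e30db0 == deposit(), 0x2e1a7d4d == withdraw(uint256)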
                    elif data == '0xd0e30db0' and transaction['to'] == WETH_CONTRACT_ADDRESS:
                        erc20_transfers.append((WETH_CONTRACT_ADDRESS, 0, "0x0000000000000000000000000000000000000000", transaction['from'], transaction['value'], 'unconfirmed'))
                    elif data.startswith('0x2e1a7d4d') and len(data) == 74:
                        token_value = hex(int(data[-64:], 16))
                        erc20_transfers.append((WETH_CONTRACT_ADDRESS, 0, transaction['from'], "0x0000000000000000000000000000000000000000", token_value, 'unconfirmed'))

            if db_tx:
                is_interesting = True
            else:
                # find out if there is anyone interested in this transaction
                is_interesting = await con.fetchval("SELECT 1 FROM notification_registrations "
                                                    "WHERE eth_address = $1 OR eth_address = $2",
                                                    to_address, from_address)
            if not is_interesting and len(erc20_transfers) > 0:
                for _, _, erc20_from_address, erc20_to_address, _, _ in erc20_transfers:
                    is_interesting = await con.fetchval("SELECT 1 FROM notification_registrations "
                                                        "WHERE eth_address = $1 OR eth_address = $2",
                                                        erc20_to_address, erc20_from_address)
                    if is_interesting:
                        break
                    is_interesting = await con.fetchval("SELECT 1 FROM token_registrations "
                                                        "WHERE eth_address = $1 OR eth_address = $2",
                                                        erc20_to_address, erc20_from_address)
                    if is_interesting:
                        break

            if not is_interesting:
                return

            if db_tx is None:
                # if so, add it to the database and trigger an update
                # add tx to database
                db_tx = await con.fetchrow(
                    "INSERT INTO transactions "
                    "(hash, from_address, to_address, nonce, "
                    "value, gas, gas_price, "
                    "data) "
                    "VALUES ($1, $2, $3, $4, $5, $6, $7, $8) "
                    "RETURNING transaction_id",
                    transaction['hash'], from_address, to_address, parse_int(transaction['nonce']),
                    hex(parse_int(transaction['value'])), hex(parse_int(transaction['gas'])), hex(parse_int(transaction['gasPrice'])),
                    transaction['input'])

            for erc20_contract_address, transaction_log_index, erc20_from_address, erc20_to_address, erc20_value, erc20_status in erc20_transfers:
                is_interesting = await con.fetchval("SELECT 1 FROM notification_registrations "
                                                    "WHERE eth_address = $1 OR eth_address = $2",
                                                    erc20_to_address, erc20_from_address)
                if not is_interesting:
                    is_interesting = await con.fetchrow("SELECT 1 FROM token_registrations "
                                                        "WHERE eth_address = $1 OR eth_address = $2",
                                                        erc20_to_address, erc20_from_address)

                if is_interesting:
                    await con.execute(
                        "INSERT INTO token_transactions "
                        "(transaction_id, transaction_log_index, contract_address, from_address, to_address, value, status) "
                        "VALUES ($1, $2, $3, $4, $5, $6, $7) "
                        "ON CONFLICT (transaction_id, transaction_log_index) DO UPDATE "
                        "SET from_address = EXCLUDED.from_address, to_address = EXCLUDED.to_address, value = EXCLUDED.value",
                        db_tx['transaction_id'], transaction_log_index, erc20_contract_address,
                        erc20_from_address, erc20_to_address, erc20_value, erc20_status)

            manager_dispatcher.update_transaction(
                db_tx['transaction_id'],
                'confirmed' if transaction['blockNumber'] is not None else 'unconfirmed')
            return db_tx['transaction_id']
Example #14
def decode_single_address(address):
    """decodes address data from 32 byte logs"""
    return decode_single(('address', '', []), data_decoder(address))
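A minimal usage sketch for the helper above, with hypothetical values; it assumes data_decoder hex-decodes a "0x"-prefixed string into raw bytes and that decode_single returns the 20-byte address packed into the low bytes of the 32-byte word:

# a hypothetical log topic: a 20-byte address left-padded with zeros to 32 bytes
padded_topic = "0x" + "00" * 12 + "ab" * 20
owner_address = decode_single_address(padded_topic)  # -> the embedded 20-byte address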
Example #15
def test_abi_decode_single_bytes():
    typ = ['bytes', '8', []]
    assert (b'\x01\x02' + b'\x00'*6) == abi.decode_single(typ, abi.encode_single(typ, b'\x01\x02'))

    typ = ['bytes', '', []]
    assert b'\x01\x02' == abi.decode_single(typ, abi.encode_single(typ, b'\x01\x02'))
Example #16
    async def process_block_for_asset_contract(self, contract_address):

        if contract_address in self._processing and not self._processing[contract_address].done():
            log.debug("Already processing {}".format(contract_address))
            self._queue.add(contract_address)
            return

        self._processing[contract_address] = asyncio.Task.current_task()

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            collectible = await con.fetchrow("SELECT * FROM fungible_collectibles WHERE contract_address = $1",
                                             contract_address)

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            del self._processing[contract_address]
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        topics = [[ASSET_TRANSFER_TOPIC]]

        updates = {}

        req_start = time.time()
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=topics,
                    address=contract_address)
                if time.time() - req_start > 10:
                    log.warning("eth_getLogs(fromBlock={}, toBlock={}, topics={}, address={}) took {} seconds to complete".format(
                        from_block_number, to_block_number, topics, contract_address, time.time() - req_start))
                break
            except JsonRPCError as e:
                if e.message != "Unknown block number":
                    log.exception("unexpected error getting logs for fungible asset contract: {} (after {} seconds)".format(contract_address, time.time() - req_start))
                await asyncio.sleep(random.random())
                continue
            except:
                log.exception("unexpected error getting logs for fungible asset contract: {} (after {} seconds)".format(contract_address, time.time() - req_start))
                # backoff randomly
                await asyncio.sleep(random.random())
                continue

        if len(logs):

            for i, _log in enumerate(logs):
                log_block_number = int(_log['blockNumber'], 16)
                if log_block_number < from_block_number or log_block_number > to_block_number:
                    log.error("go unexpected block number in logs: {} (fromBlock={}, toBlock={}, address={})".format(
                        log_block_number, from_block_number, to_block_number, contract_address))
                    del self._processing[contract_address]
                    return

                topic = _log['topics'][0]

                if topic == ASSET_TRANSFER_TOPIC:
                    indexed_data = _log['topics'][1:]
                    data_types = ['uint256']
                    try:
                        data = decode_abi(data_types, data_decoder(_log['data']))
                    except:
                        log.exception("Error decoding log data: {} {}".format(data_types, _log['data']))
                        del self._processing[contract_address]
                        return
                    arguments = []
                    try:
                        for t, i in [('address', True), ('address', True), ('uint256', False)]:
                            if i is True:
                                arguments.append(decode_single(process_type(t), data_decoder(indexed_data.pop(0))))
                            else:
                                arguments.append(data.pop(0))
                    except:
                        log.exception("Error compiling event data")
                        log.info("LOG: {}".format(_log))
                        del self._processing[contract_address]
                        return

                    from_address = arguments[0]
                    to_address = arguments[1]
                    value = parse_int(arguments[2])

                    async with self.pool.acquire() as con:
                        if from_address and from_address not in updates:
                            balance = await con.fetchval(
                                "SELECT balance FROM fungible_collectible_balances WHERE contract_address = $1 AND owner_address = $2",
                                contract_address, from_address)
                            updates[from_address] = parse_int(balance) if balance is not None else 0

                        if to_address not in updates:
                            balance = await con.fetchval(
                                "SELECT balance FROM fungible_collectible_balances WHERE contract_address = $1 AND owner_address = $2",
                                contract_address, to_address)
                            updates[to_address] = parse_int(balance) if balance is not None else 0

                    updates[from_address] -= value
                    updates[to_address] += value

            if len(updates) > 0:
                async with self.pool.acquire() as con:
                    await con.executemany(
                        "INSERT INTO fungible_collectible_balances (contract_address, owner_address, balance) "
                        "VALUES ($1, $2, $3) "
                        "ON CONFLICT (contract_address, owner_address) DO UPDATE "
                        "SET balance = EXCLUDED.balance",
                        [(contract_address, address, hex(value)) for address, value in updates.items()])

        ready = collectible['ready'] or to_block_number == latest_block_number

        async with self.pool.acquire() as con:
            await con.execute("UPDATE fungible_collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                              to_block_number, ready, contract_address)

        del self._processing[contract_address]
        if to_block_number < latest_block_number or contract_address in self._queue:
            self._queue.discard(contract_address)
            asyncio.get_event_loop().create_task(self.process_block_for_asset_contract(contract_address))
Example #17
    async def process_block_for_contract(self, collectible_address):
        if collectible_address in self._processing:
            return

        self._processing[collectible_address] = True

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            collectible = await con.fetchrow("SELECT * FROM collectibles WHERE contract_address = $1",
                                             collectible_address)
            if collectible['type'] == 1:
                events = await con.fetch("SELECT * FROM collectible_transfer_events "
                                         "WHERE collectible_address = $1",
                                         collectible_address)
            elif collectible['type'] == 721:
                # use default erc721 event
                # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
                events = [{
                    'collectible_address': collectible_address,
                    'contract_address': collectible_address,
                    'name': 'Transfer',
                    'topic_hash': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                    'arguments': ['address', 'address', 'uint256'],
                    'indexed_arguments': [True, True, False],
                    'to_address_offset': 1,
                    'token_id_offset': 2
                }]
            else:
                log.error("Collectible with unknown type {}".format(
                    collectible_address))
                del self._processing[collectible_address]
                return

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            del self._processing[collectible_address]
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        updates = {}

        for event in events:
            contract_address = event['contract_address']

            while True:
                try:
                    logs = await self.eth.eth_getLogs(
                        fromBlock=from_block_number, toBlock=to_block_number,
                        topics=[[event['topic_hash']]],
                        address=contract_address)
                    break
                except:
                    log.exception("error getting logs for block")
                    continue

            if len(logs):

                for _log in logs:
                    indexed_data = _log['topics'][1:]
                    data_types = [t for t, i in zip(event['arguments'], event['indexed_arguments']) if i is False]
                    try:
                        data = decode_abi(data_types, data_decoder(_log['data']))
                    except:
                        log.exception("Error decoding log data: {} {}".format(data_types, _log['data']))
                        del self._processing[collectible_address]
                        return
                    arguments = []
                    try:
                        for t, i in zip(event['arguments'], event['indexed_arguments']):
                            if i is True:
                                arguments.append(decode_single(process_type(t), data_decoder(indexed_data.pop(0))))
                            else:
                                arguments.append(data.pop(0))
                    except:
                        log.exception("Error compiling event data")
                        del self._processing[collectible_address]
                        return

                    to_address = arguments[event['to_address_offset']]
                    token_id = parse_int(arguments[event['token_id_offset']])

                    log.debug("{} #{} -> {} -> {}".format(collectible['name'], token_id,
                                                          event['name'], to_address))
                    token_image = config['collectibles']['image_format'].format(
                        contract_address=collectible_address,
                        token_id=token_id)
                    updates[hex(token_id)] = (collectible_address, hex(token_id), to_address, token_image)

        if len(updates) > 0:
            async with self.pool.acquire() as con:
                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, image) "
                    "VALUES ($1, $2, $3, $4) "
                    "ON CONFLICT (contract_address, token_id) DO UPDATE "
                    "SET owner_address = EXCLUDED.owner_address",
                    list(updates.values()))

        ready = collectible['ready'] or to_block_number == latest_block_number

        self.last_block = to_block_number
        async with self.pool.acquire() as con:
            await con.execute("UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                              to_block_number, ready, collectible_address)

        del self._processing[collectible_address]
        #log.info("Processed blocks #{} -> #{} for {} in {} seconds".format(
        #    from_block_number, to_block_number, collectible['name'], time.time() - starttime))
        if to_block_number < latest_block_number:
            asyncio.ensure_future(self.process_block_for_contract(collectible_address))
Example #18
    async def process_block_for_contract(self, collectible_address):
        if collectible_address in self._processing and not self._processing[
                collectible_address].done():
            log.debug("Already processing {}".format(collectible_address))
            return

        self._processing[collectible_address] = asyncio.Task.current_task()

        async with self.pool.acquire() as con:
            latest_block_number = await con.fetchval(
                "SELECT blocknumber FROM last_blocknumber")
            collectible = await con.fetchrow(
                "SELECT * FROM collectibles WHERE contract_address = $1",
                collectible_address)
            if collectible is None:
                log.error(
                    "Unable to find collectible with contract_address {}".
                    format(collectible_address))
                del self._processing[collectible_address]
                return

            if collectible['type'] == 1:
                events = await con.fetch(
                    "SELECT * FROM collectible_transfer_events "
                    "WHERE collectible_address = $1", collectible_address)
            elif collectible['type'] == 3:
                # use default old (token id not indexed) erc721 event
                events = [{
                    'collectible_address': collectible_address,
                    'contract_address': collectible_address,
                    'name': 'Transfer',
                    'topic_hash':
                    '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                    'arguments': ['address', 'address', 'uint256'],
                    'indexed_arguments': [True, True, False],
                    'to_address_offset': 1,
                    'token_id_offset': 2
                }]
            elif collectible['type'] == 721:
                # use default erc721 event
                # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
                events = [{
                    'collectible_address': collectible_address,
                    'contract_address': collectible_address,
                    'name': 'Transfer',
                    'topic_hash':
                    '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                    'arguments': ['address', 'address', 'uint256'],
                    'indexed_arguments': [True, True, True],
                    'to_address_offset': 1,
                    'token_id_offset': 2
                }]
            else:
                log.error("Collectible with unknown type {}".format(
                    collectible_address))
                del self._processing[collectible_address]
                return

        from_block_number = collectible['last_block'] + 1

        if latest_block_number < from_block_number:
            del self._processing[collectible_address]
            log.info(
                "Aborting {} because latest block number < collectible's next block"
                .format(collectible_address))
            return

        to_block_number = min(from_block_number + 1000, latest_block_number)

        updates = {}

        for event in events:
            contract_address = event['contract_address']

            while True:
                try:
                    logs = await self.eth.eth_getLogs(
                        fromBlock=from_block_number,
                        toBlock=to_block_number,
                        topics=[[event['topic_hash']]],
                        address=contract_address)
                    break
                except Exception as e:
                    if hasattr(
                            e,
                            'message') and e.message != "Unknown block number":
                        log.exception(
                            "unexpected error getting logs for collectible at address: {}"
                            .format(contract_address))
                    continue

            if len(logs):

                for _log in logs:
                    indexed_data = _log['topics'][1:]
                    data_types = [
                        t for t, i in zip(event['arguments'],
                                          event['indexed_arguments'])
                        if i is False
                    ]
                    try:
                        data = decode_abi(data_types,
                                          data_decoder(_log['data']))
                    except Exception:
                        log.exception("Error decoding log data: {} {}".format(
                            data_types, _log['data']))
                        del self._processing[collectible_address]
                        return
                    arguments = []
                    try:
                        for t, i in zip(event['arguments'],
                                        event['indexed_arguments']):
                            if i is True:
                                arguments.append(
                                    decode_single(
                                        process_type(t),
                                        data_decoder(indexed_data.pop(0))))
                            else:
                                arguments.append(data.pop(0))
                    except Exception:
                        log.exception("Error compiling event data")
                        log.info("EVENT: {}".format(event))
                        log.info("LOG: {}".format(_log))
                        del self._processing[collectible_address]
                        return

                    to_address = arguments[event['to_address_offset']]
                    token_id = parse_int(arguments[event['token_id_offset']])

                    if collectible['ready'] is False:
                        log.info("{} #{} -> {} -> {}".format(
                            collectible['name'], token_id, event['name'],
                            to_address))
                    updates[hex(token_id)] = (collectible_address,
                                              hex(token_id), to_address)

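        # updates maps each token id (hex) to its latest transfer seen in this
        # block range, so only the final owner per token is written below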
        if len(updates) > 0:
            new_tokens = []
            for token_id in list(updates.keys()):
                async with self.pool.acquire() as con:
                    token = await con.fetchrow(
                        "SELECT * FROM collectible_tokens WHERE contract_address = $1 AND token_id = $2",
                        collectible_address, token_id)
                if token is None:
                    token_image = None
                    token_name = None
                    token_description = None
                    token_uri = None
                    token_uri_data = None

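                    # two metadata paths: MLB tokens use a dedicated metadata
                    # API, all other collectibles are queried via the contract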
                    if collectible_address == MLB_CONTRACT_ADDRESS:
                        url = MLB_METADATA_URL.format(token_id)
                        try:
                            resp = await AsyncHTTPClient(max_clients=100
                                                         ).fetch(url)
                            metadata = json_decode(resp.body)
                            if 'fullName' not in metadata['result'][
                                    'mlbPlayerInfo']:
                                token_name = None
                            else:
                                token_name = metadata['result'][
                                    'mlbPlayerInfo']['fullName']
                            token_image = metadata['result']['imagesURL'][
                                'threeSixtyImages']['0']
                        except Exception:
                            log.exception(
                                "Error getting token metadata for {}:{} from {}"
                                .format(collectible_address, token_id, url))
                    else:

                        # get token details
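                        # the call data is TOKEN_URI_CALL_DATA (presumably the
                        # tokenURI(uint256) selector) followed by the token id
                        # left-padded to 32 bytes of hex; retry on transient
                        # json-rpc errors, but give up on VM execution errors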
                        while True:
                            try:
                                token_uri_data = await self.eth.eth_call(
                                    to_address=collectible_address,
                                    data="{}{:064x}".format(
                                        TOKEN_URI_CALL_DATA, int(token_id,
                                                                 16)))
                                break
                            except JsonRPCError as e:
                                if e.message == 'VM execution error.':
                                    break
                                continue

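                        # decode the returned abi-encoded string when the call
                        # produced data; an empty result or a bare "0x" is
                        # treated as "no tokenURI available"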
                        if token_uri_data and token_uri_data != "0x":
                            try:
                                token_uri = decode_abi(
                                    ['string'],
                                    data_decoder(token_uri_data))[0].decode(
                                        'utf-8', errors='replace')
                            except Exception:
                                log.exception("Error decoding tokenURI data")

                        # if token_uri points to a valid url check if it points to json (for the erc721 metadata)
                        parsed_uri = urlparse(token_uri)
                        if token_uri and parsed_uri.netloc and parsed_uri.scheme in [
                                'http', 'https'
                        ]:
                            try:
                                resp = await AsyncHTTPClient(
                                    max_clients=100).fetch(parsed_uri.geturl())
                                metadata = json_decode(resp.body)
                                properties = {}
                                if "properties" in metadata and type(
                                        metadata['properties']) == dict:
                                    properties = metadata['properties']
                                name_prop = properties.get(
                                    'name', metadata.get('name', None))
                                if name_prop:
                                    if type(
                                            name_prop
                                    ) == dict and 'description' in name_prop:
                                        token_name = name_prop['description']
                                    elif type(name_prop) == str:
                                        token_name = name_prop
                                description_prop = properties.get(
                                    'description',
                                    metadata.get('description', None))
                                if description_prop:
                                    if type(
                                            description_prop
                                    ) == dict and 'description' in description_prop:
                                        token_description = description_prop[
                                            'description']
                                    elif type(description_prop) == str:
                                        token_description = description_prop
                                image_prop = properties.get(
                                    'image', metadata.get('image', None))
                                if image_prop:
                                    if type(
                                            image_prop
                                    ) == dict and 'description' in image_prop:
                                        token_image = image_prop['description']
                                    elif type(image_prop) == str:
                                        token_image = image_prop
                            except Exception:
                                log.exception(
                                    "Error getting token metadata for {}:{} from {}"
                                    .format(collectible_address, token_id,
                                            token_uri))
                        elif token_uri is not None:
                            log.warning(
                                "token_uri is not a valid url: {}: {}".format(
                                    collectible_address, token_uri))

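                    # fall back to a templated image url when the metadata
                    # didn't provide one, preferring the collectible's own
                    # format string over the global config default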
                    if not token_image:
                        if collectible['image_url_format_string'] is not None:
                            image_format_string = collectible[
                                'image_url_format_string']
                        else:
                            image_format_string = config['collectibles'][
                                'image_format']
                        token_image = image_format_string.format(
                            contract_address=collectible_address,
                            token_id_hex=token_id,
                            token_id_int=int(token_id, 16),
                            token_uri=token_uri)

                    log.info("new '{}' collectible: {} {} {} {} {}".format(
                        collectible['name'], token_id, token_uri, token_name,
                        token_description, token_image))
                    new_token = updates.pop(token_id, ()) + (
                        token_uri, token_name, token_description, token_image)
                    new_tokens.append(new_token)

            async with self.pool.acquire() as con:
                if len(new_tokens) > 0:
                    await con.executemany(
                        "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, token_uri, name, description, image) "
                        "VALUES ($1, $2, $3, $4, $5, $6, $7)", new_tokens)

                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address) "
                    "VALUES ($1, $2, $3) "
                    "ON CONFLICT (contract_address, token_id) DO UPDATE "
                    "SET owner_address = EXCLUDED.owner_address",
                    list(updates.values()))

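        # the collectible is marked ready once processing has caught up with
        # the latest block; once ready it stays ready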
        ready = collectible['ready'] or to_block_number == latest_block_number

        async with self.pool.acquire() as con:
            await con.execute(
                "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                to_block_number, ready, collectible_address)

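        # MLB-specific backfill: re-fetch metadata for up to 100 tokens still
        # missing a name and update name/image where they have changed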
        if collectible_address == MLB_CONTRACT_ADDRESS:
            async with self.pool.acquire() as con:
                tokens = await con.fetch(
                    "SELECT * FROM collectible_tokens WHERE contract_address = $1 AND name IS NULL ORDER BY token_id LIMIT 100",
                    collectible_address)

            updates = []
            for token in tokens:
                token_id = token['token_id']

                url = MLB_METADATA_URL.format(token_id)
                try:
                    resp = await AsyncHTTPClient(max_clients=100).fetch(url)
                    metadata = json_decode(resp.body)
                    if 'mlbPlayerInfo' not in metadata['result']:
                        continue
                    if 'fullName' not in metadata['result']['mlbPlayerInfo']:
                        token_name = None
                    else:
                        token_name = metadata['result']['mlbPlayerInfo'][
                            'fullName']
                    token_image = metadata['result']['imagesURL'][
                        'threeSixtyImages']['0']

                    if token_name == token['name'] and token_image == token[
                            'image']:
                        # nothing to update
                        continue

                    updates.append((collectible_address, token_id, token_name,
                                    token_image))
                    log.info("updated '{}' collectible: {} {} {} {} {}".format(
                        collectible['name'], token_id, None, token_name, None,
                        token_image))
                except Exception:
                    log.exception(
                        "Error getting token metadata for {}:{} from {}".
                        format(collectible_address, token_id, url))

            if updates:
                async with self.pool.acquire() as con:
                    await con.executemany(
                        "UPDATE collectible_tokens "
                        "SET name = $3, image = $4 "
                        "WHERE contract_address = $1 AND token_id = $2",
                        updates)

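        # clear the in-progress marker and schedule another pass if this chunk
        # did not reach the latest block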
        del self._processing[collectible_address]
        if to_block_number < latest_block_number:
            asyncio.get_event_loop().create_task(
                self.process_block_for_contract(collectible_address))