Example #1
0
def index_blocks(self, db, blocks_list):
    """Index each block in blocks_list into the database and redis.

    The list is walked from its last element to its first (oldest block
    first, assuming blocks_list arrives newest-first — TODO confirm the
    ordering contract with the caller). Each block is processed inside its
    own scoped DB session:

      * the single blocks-table row marked ``is_current`` is demoted and a
        new current Block row is inserted,
      * the block's transactions are sorted by hash and their receipts are
        bucketed by target contract address (user / track / social feature /
        playlist / user library factories),
      * each bucket is applied in bulk via the matching ``*_state_update``
        helper,
      * after commit, redis caches for changed user/track/playlist ids are
        invalidated.

    After every block, the most recently indexed block number and hash are
    written to redis under well-known keys.

    Args:
        db: project database wrapper exposing ``scoped_session()``.
        blocks_list: sequence of web3 block objects with ``number``,
            ``timestamp``, ``hash``, ``parentHash`` and ``transactions``.
    """
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    # Reverse iteration: index from the end of the list toward the front.
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        block = blocks_list[i]
        block_index = num_blocks - i  # 1-based progress counter for logging
        block_number = block.number
        block_timestamp = block.timestamp
        logger.info(
            f"index.py | index_blocks | {self.request.id} | block {block.number} - {block_index}/{num_blocks}"
        )

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(
                is_current=True)

            # Without this check we may end up duplicating an insert operation
            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update blocks table after
            # Invariant: exactly one row carries is_current.
            # NOTE(review): assert is stripped under `python -O`; consider an
            # explicit raise if this invariant must hold in production.
            assert (current_block_query.count() == 1
                    ), "Expected single row marked as current"

            # Demote the previous head, then insert the new current block.
            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            # Per-contract receipt buckets, filled while parsing the block's
            # transactions and processed in bulk afterwards.
            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

            # Batch-fetch receipts for every tx in the block up front,
            # keyed by tx hash.
            tx_receipt_dict = fetch_tx_receipts(self, block.transactions)

            # Sort transactions by hash for a deterministic processing order
            # within the block.
            sorted_txs = sorted(block.transactions,
                                key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                tx_receipt = tx_receipt_dict[tx_hash]

                # Handle user operations
                if tx_target_contract_address == contract_addresses[
                        "user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses[
                        "track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}")
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses[
                        "social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    social_feature_factory_txs.append(tx_receipt)

                # Handle repost operations
                if tx_target_contract_address == contract_addresses[
                        "playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses[
                        "user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    user_library_factory_txs.append(tx_receipt)

            # bulk process operations once all tx's for block have been parsed
            total_user_changes, user_ids = user_state_update(
                self, update_task, session, user_factory_txs, block_number,
                block_timestamp)
            user_state_changed = total_user_changes > 0

            total_track_changes, track_ids = track_state_update(
                self, update_task, session, track_factory_txs, block_number,
                block_timestamp)
            track_state_changed = total_track_changes > 0

            social_feature_state_changed = (  # pylint: disable=W0612
                social_feature_state_update(self, update_task, session,
                                            social_feature_factory_txs,
                                            block_number, block_timestamp) > 0)

            # Playlist state operations processed in bulk
            total_playlist_changes, playlist_ids = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number,
                block_timestamp)
            playlist_state_changed = total_playlist_changes > 0

            user_library_state_changed = user_library_state_update(  # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs,
                block_number, block_timestamp)

            # User and track changes both feed the track-lexeme cache
            # invalidation below.
            track_lexeme_state_changed = (user_state_changed
                                          or track_state_changed)
            # Commit before touching redis so caches are only invalidated for
            # state that actually persisted.
            session.commit()
            if user_state_changed:
                if user_ids:
                    remove_cached_user_ids(redis, user_ids)
            if track_lexeme_state_changed:
                if track_ids:
                    remove_cached_track_ids(redis, track_ids)
            if playlist_state_changed:
                if playlist_ids:
                    remove_cached_playlist_ids(redis, playlist_ids)

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)
        redis.set(most_recent_indexed_block_hash_redis_key, block.hash.hex())

    if num_blocks > 0:
        logger.warning(
            f"index.py | index_blocks | Indexed {num_blocks} blocks")
Example #2
0
def index_blocks(self, db, blocks_list):
    """Index each block in blocks_list into the database and redis.

    Variant of the indexer that fetches each transaction receipt
    individually over RPC and keeps the search materialized views in sync
    inside the same DB transaction (via ``session.flush()`` + ``REFRESH
    MATERIALIZED VIEW``) instead of invalidating redis caches.

    The list is walked from its last element to its first (oldest block
    first, assuming blocks_list arrives newest-first — TODO confirm the
    ordering contract with the caller). Each block runs inside its own
    scoped DB session:

      * the single blocks-table row marked ``is_current`` is demoted and a
        new current Block row is inserted,
      * transactions are sorted by hash and their receipts bucketed by
        target contract address,
      * each bucket is applied in bulk via the matching ``*_state_update``
        helper,
      * the lexeme materialized views are refreshed when relevant state
        changed.

    After every block, the most recently indexed block number is written to
    redis.

    Args:
        db: project database wrapper exposing ``scoped_session()``.
        blocks_list: sequence of web3 block objects with ``number``,
            ``timestamp``, ``hash``, ``parentHash`` and ``transactions``.
    """
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    # Reverse iteration: index from the end of the list toward the front.
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        # Emit a progress log every 10th block (skipping the final i == 0).
        if i % 10 == 0 and i != 0:
            block_index = num_blocks - i
            logger.info(f"index.py | index_blocks | processing block {block_index}/{num_blocks} blocks")

        block = blocks_list[i]
        block_number = block.number
        block_timestamp = block.timestamp

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(is_current=True)

            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update blocks table after
            # Invariant: exactly one row carries is_current.
            # NOTE(review): assert is stripped under `python -O`; consider an
            # explicit raise if this invariant must hold in production.
            assert (
                current_block_query.count() == 1
            ), "Expected single row marked as current"

            # Demote the previous head, then insert the new current block.
            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            # Per-contract receipt buckets, filled while parsing the block's
            # transactions and processed in bulk afterwards.
            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

            # Sort transactions by hash for a deterministic processing order
            # within the block.
            sorted_txs = sorted(block.transactions, key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                # NOTE(review): one RPC round-trip per transaction; a batched
                # receipt fetch would reduce latency on busy blocks.
                tx_receipt = web3.eth.getTransactionReceipt(tx_hash)

                # Handle user operations
                if tx_target_contract_address == contract_addresses["user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses["track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses["social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    social_feature_factory_txs.append(tx_receipt)

                # Handle repost operations
                if tx_target_contract_address == contract_addresses["playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses["user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    user_library_factory_txs.append(tx_receipt)

            # bulk process operations once all tx's for block have been parsed
            user_state_changed = (
                user_state_update(
                    self, update_task, session, user_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            track_state_changed = (
                track_state_update(
                    self, update_task, session, track_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            social_feature_state_changed = ( # pylint: disable=W0612
                social_feature_state_update(
                    self, update_task, session, social_feature_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            # Playlist state operations processed in bulk
            # NOTE(review): unlike the flags above, this keeps the helper's
            # raw return value (no `> 0`); it is only used for truthiness
            # below, so behavior matches if the helper returns a count —
            # confirm the helper's return contract.
            playlist_state_changed = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number, block_timestamp
            )

            user_library_state_changed = user_library_state_update( # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs, block_number, block_timestamp
            )

            # keep search materialized view in sync with db
            # only refresh track_lexeme_dict when necessary
            # social state changes are not factored in since they don't affect track_lexeme_dict
            # write out all pending transactions to db before refreshing view
            track_lexeme_state_changed = (user_state_changed or track_state_changed)
            session.flush()
            if user_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict")
            if track_lexeme_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict")
            if playlist_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict")
                session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict")

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)

    if num_blocks > 0:
        logger.warning(f"index.py | index_blocks | Indexed {num_blocks} blocks")