Example #1
def index_blocks(self, db, blocks_list):
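    """Index each block in blocks_list in its own database transaction: advance
    the current-block pointer, bulk-process the factory txs found in the block,
    and refresh the search materialized views when relevant state changed."""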
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        if i % 10 == 0 and i != 0:
            block_index = num_blocks - i
            logger.info(f"index.py | index_blocks | processing block {block_index}/{num_blocks} blocks")

        block = blocks_list[i]
        block_number = block.number
        block_timestamp = block.timestamp

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(is_current=True)

            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update the blocks table; exactly one row should be marked as current
            assert (
                current_block_query.count() == 1
            ), "Expected single row marked as current"

            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

            # Sort transactions by hash
            sorted_txs = sorted(block.transactions, key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                tx_receipt = web3.eth.getTransactionReceipt(tx_hash)

                # Handle user operations
                if tx_target_contract_address == contract_addresses["user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses["track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses["social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    social_feature_factory_txs.append(tx_receipt)

                # Handle playlist operations
                if tx_target_contract_address == contract_addresses["playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses["user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    user_library_factory_txs.append(tx_receipt)

            # Bulk-process operations once all txs for the block have been parsed
            user_state_changed = (
                user_state_update(
                    self, update_task, session, user_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            track_state_changed = (
                track_state_update(
                    self, update_task, session, track_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            social_feature_state_changed = ( # pylint: disable=W0612
                social_feature_state_update(
                    self, update_task, session, social_feature_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            # Playlist state operations processed in bulk
            playlist_state_changed = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number, block_timestamp
            )

            user_library_state_changed = user_library_state_update( # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs, block_number, block_timestamp
            )

            # keep search materialized view in sync with db
            # only refresh track_lexeme_dict when necessary
            # social state changes are not factored in since they don't affect track_lexeme_dict
            # write out all pending transactions to db before refreshing view
            track_lexeme_state_changed = (user_state_changed or track_state_changed)
            session.flush()
            if user_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict")
            if track_lexeme_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict")
            if playlist_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict")
                session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict")

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)

    if num_blocks > 0:
        logger.warning(f"index.py | index_blocks | Indexed {num_blocks} blocks")
Example #2
def index_blocks(self, db, blocks_list):
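    """Index each block in blocks_list in its own database transaction: advance
    the current-block pointer, bulk-process the factory txs found in the block,
    and evict stale user/track/playlist cache entries after commit."""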
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        block = blocks_list[i]
        block_index = num_blocks - i
        block_number = block.number
        block_timestamp = block.timestamp
        logger.info(
            f"index.py | index_blocks | {self.request.id} | block {block.number} - {block_index}/{num_blocks}"
        )

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(is_current=True)

            # Build the row for the new current block; the assertion below guards
            # against a duplicated insert
            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update the blocks table; exactly one row should be marked as current
            assert (current_block_query.count() == 1
                    ), "Expected single row marked as current"

            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

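            # Batch-fetch the receipts for every tx in the block up front, keyed
            # by hex tx hash, rather than issuing one getTransactionReceipt call
            # per tx as in Example #1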
            tx_receipt_dict = fetch_tx_receipts(self, block.transactions)

            # Sort transactions by hash
            sorted_txs = sorted(block.transactions,
                                key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                tx_receipt = tx_receipt_dict[tx_hash]

                # Handle user operations
                if tx_target_contract_address == contract_addresses["user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses["track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}")
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses["social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    social_feature_factory_txs.append(tx_receipt)

                # Handle playlist operations
                if tx_target_contract_address == contract_addresses["playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses["user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    user_library_factory_txs.append(tx_receipt)

            # Bulk-process operations once all txs for the block have been parsed
            total_user_changes, user_ids = user_state_update(
                self, update_task, session, user_factory_txs, block_number,
                block_timestamp)
            user_state_changed = total_user_changes > 0

            total_track_changes, track_ids = track_state_update(
                self, update_task, session, track_factory_txs, block_number,
                block_timestamp)
            track_state_changed = total_track_changes > 0

            social_feature_state_changed = (  # pylint: disable=W0612
                social_feature_state_update(self, update_task, session,
                                            social_feature_factory_txs,
                                            block_number, block_timestamp) > 0)

            # Playlist state operations processed in bulk
            total_playlist_changes, playlist_ids = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number,
                block_timestamp)
            playlist_state_changed = total_playlist_changes > 0

            user_library_state_changed = user_library_state_update(  # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs,
                block_number, block_timestamp)

            track_lexeme_state_changed = user_state_changed or track_state_changed
            session.commit()
            if user_state_changed and user_ids:
                remove_cached_user_ids(redis, user_ids)
            if track_lexeme_state_changed and track_ids:
                remove_cached_track_ids(redis, track_ids)
            if playlist_state_changed and playlist_ids:
                remove_cached_playlist_ids(redis, playlist_ids)

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)
        redis.set(most_recent_indexed_block_hash_redis_key, block.hash.hex())

    if num_blocks > 0:
        logger.warning(
            f"index.py | index_blocks | Indexed {num_blocks} blocks")
Example #3
def test_playlist_indexing_skip_tx(app, mocker):
    """Tests that playlist indexing skips cursed txs without raising an error and still processes the other txs in the block"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3,
                                 challenge_event_bus)

    class TestPlaylistTransaction:
        pass

    blessed_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3")
    blessed_tx = TestPlaylistTransaction()
    blessed_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_tx_hash)
    cursed_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be")
    cursed_tx = TestPlaylistTransaction()
    cursed_tx.transactionHash = update_task.web3.toBytes(hexstr=cursed_tx_hash)
    test_block_number = 25278765
    test_block_timestamp = 1
    # Assumed: block_hash is defined elsewhere in the original test module (its
    # value is elided here); a placeholder keeps the snippet self-contained
    block_hash = b"\x00" * 32
    test_block_hash = update_task.web3.toHex(block_hash)
    test_playlist_factory_txs = [cursed_tx, blessed_tx]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)
    blessed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_tx_hash,
        playlist_id=91232,
        is_album=False,
        is_private=False,
        playlist_name="test",
        playlist_contents={},
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description="testing!",
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=test_timestamp,
        playlist_owner_id=1,
    )
    cursed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_tx_hash,
        playlist_id=91238,
        is_album=None,
        is_private=None,
        playlist_name=None,
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description=None,
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.playlists.lookup_playlist_record",
        side_effect=[cursed_playlist_record, blessed_playlist_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.playlists.get_playlist_events_tx",
        side_effect=[
            [],  # no playlist created events
            [
                {
                    "args":
                    AttrDict({
                        "_playlistId": cursed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],  # second tx receipt
            [
                {
                    "args":
                    AttrDict({
                        "_playlistId":
                        blessed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        ],
        autospec=True,
    )
    test_ipfs_metadata = {}
    test_blacklisted_cids = {}

    with db.scoped_session() as session:
        try:
            current_block = Block(
                blockhash=test_block_hash,
                parenthash=test_block_hash,
                number=test_block_number,
                is_current=True,
            )
            session.add(current_block)
            (total_changes, updated_playlist_ids_set) = playlist_state_update(
                update_task,
                update_task,
                session,
                test_playlist_factory_txs,
                test_block_number,
                test_block_timestamp,
                block_hash,
                test_ipfs_metadata,
                test_blacklisted_cids,
            )
            assert len(updated_playlist_ids_set) == 1
            assert (list(updated_playlist_ids_set)[0] ==
                    blessed_playlist_record.playlist_id)
            assert total_changes == 1
            assert (session.query(SkippedTransaction).filter(
                SkippedTransaction.txhash == cursed_playlist_record.txhash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            ).first())
            assert (session.query(Playlist).filter(
                Playlist.playlist_id ==
                blessed_playlist_record.playlist_id).first())
            assert (session.query(Playlist).filter(
                Playlist.playlist_id ==
                cursed_playlist_record.playlist_id).first()) is None
        except Exception:
            assert False