def setup_verified_test(session):
    """Seed fixtures for the referral-challenge tests.

    Inserts two blocks and two users (user 2 is verified), resets the
    challenges table, and returns a ChallengeEventBus with both the plain
    and verified referral challenge managers registered on DEFAULT_EVENT.
    """
    # Setup
    blocks = [
        Block(blockhash="0x1", number=1, parenthash="", is_current=False),
        Block(blockhash="0x2", number=2, parenthash="", is_current=True),
    ]
    users = [
        User(
            blockhash="0x1",
            blocknumber=1,
            user_id=1,
            is_current=True,
            wallet="0xFakeWallet1",
            created_at=datetime.now(),
            updated_at=datetime.now(),
            is_verified=False,
        ),
        # User 2 is the verified one — exercised by the "ref-v" challenge.
        User(
            blockhash="0x2",
            blocknumber=2,
            user_id=2,
            is_current=True,
            wallet="0xFakeWallet2",
            created_at=datetime.now(),
            updated_at=datetime.now(),
            is_verified=True,
        ),
    ]

    challenges = [
        Challenge(
            id="referrals",
            type=ChallengeType.aggregate,
            active=True,
            amount="1",
            step_count=5,
        ),
        Challenge(
            id="ref-v",
            type=ChallengeType.aggregate,
            active=True,
            amount="1",
            step_count=500,
        ),
    ]

    # Wipe any existing challenges in the DB from running migrations, etc
    session.query(Challenge).delete()
    session.commit()
    # Commit blocks before users — presumably User.blocknumber references
    # the blocks table (matches the block/track FK pattern in this file).
    session.add_all(blocks)
    session.commit()
    session.add_all(users)
    session.add_all(challenges)
    session.commit()

    redis_conn = redis.Redis.from_url(url=REDIS_URL)
    bus = ChallengeEventBus(redis_conn)
    # Both managers listen on the same event, so one dispatch exercises
    # the plain and verified referral challenges together.
    bus.register_listener(DEFAULT_EVENT, referral_challenge_manager)
    bus.register_listener(DEFAULT_EVENT, verified_referral_challenge_manager)
    return bus
# Exemplo n.º 2
# 0
def initialize_blocks_table_if_necessary(db):
    """Ensure the blocks table has exactly one row marked is_current.

    If the table is empty, seed it with the configured start block; the
    genesis block (number 0) is stored with number=None.

    Args:
        db: database object exposing scoped_session().

    Returns:
        The target blockhash read from the discovery provider config.

    Raises:
        AssertionError: if the table is in a corrupted state — rows exist
            but not exactly one is marked as current.
    """
    # Fix: dropped the dead `target_blockhash = None` assignment that was
    # immediately overwritten by the config read.
    target_blockhash = update_task.shared_config["discprov"]["start_block"]
    target_block = update_task.web3.eth.getBlock(target_blockhash, True)

    with db.scoped_session() as session:
        current_block_query_result = session.query(Block).filter_by(is_current=True)
        if current_block_query_result.count() == 0:
            # No current block: the table must be completely empty before
            # we seed it, otherwise the DB state is corrupted.
            blocks_query_result = session.query(Block)
            assert (
                blocks_query_result.count() == 0
            ), "Corrupted DB State - Expect single row marked as current"
            block_model = Block(
                blockhash=target_blockhash,
                number=target_block.number,
                parenthash=target_blockhash,
                is_current=True,
            )

            # Store the genesis block with a NULL number.
            if target_block.number == 0:
                block_model.number = None

            session.add(block_model)
            logger.info(f"index.py | initialize_blocks_table_if_necessary | Initializing blocks table - {block_model}")
        else:
            assert (
                current_block_query_result.count() == 1
            ), "Expected SINGLE row marked as current"

    return target_blockhash
# Exemplo n.º 3
# 0
def setup_challenges(session):
    """Insert the block + user fixtures used by the listen-streak tests and
    activate the listen-streak challenge starting at BLOCK_NUMBER."""
    seed_block = Block(blockhash="0x1", number=BLOCK_NUMBER)
    seed_user = User(
        blockhash="0x1",
        blocknumber=BLOCK_NUMBER,
        txhash="xyz",
        user_id=1,
        is_current=True,
        handle="TestHandle",
        handle_lc="testhandle",
        wallet="0x1",
        is_creator=False,
        is_verified=False,
        name="test_name",
        created_at=datetime.now(),
        updated_at=datetime.now(),
    )
    # Flush the block before the user row that points at it.
    session.add(seed_block)
    session.flush()
    session.add(seed_user)
    session.flush()
    # Turn the listen-streak challenge on for this test run.
    session.query(Challenge).filter(Challenge.id == "listen-streak").update(
        {"active": True, "starting_block": BLOCK_NUMBER}
    )
# Exemplo n.º 4
# 0
def test_get_health_challenge_events_max_drift(web3_mock, redis_mock, db_mock):
    """Tests that the health check honors an unhealthy challenge events drift"""

    # Set up web3 eth: the chain tip is stubbed to block 50.
    def get_block(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 50
        block.hash = HexBytes(b"\x50")
        return block

    cache_play_health_vars(redis_mock)
    web3_mock.eth.get_block = get_block

    # Set up redis state: last challenge event processed ~50s ago,
    # which exceeds the 49s max drift passed to get_health below.
    redis_mock.set(challenges_last_processed_event_redis_key, int(time() - 50))

    # Set up db state
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash="0x01",
                number=1,
                parenthash="0x01",
                is_current=True,
            ))

    args = {"challenge_events_age_max_drift": 49}
    health_results, error = get_health(args)

    # Drift (~50s) > max allowed (49s) -> reported as unhealthy.
    assert error == True
    assert health_results["challenge_last_event_age_sec"] < int(time() - 49)
def test_resolve_user_url(app):
    """Tests that it resolves a user url (handle -> /v1/users/<hash id>)"""
    with app.test_request_context():
        db = get_db()
        with db.scoped_session() as session:
            # Seed the block first; the user row below references it.
            session.add(
                Block(
                    blockhash=
                    "0x2969e88561fac17ca19c1749cb3e614211ba15c8e471be55de47d0b8ca6acf5f",
                    parenthash="0x0000000000000000000000000000000000000000",
                    number=16914541,
                    is_current=True,
                ))
            session.flush()
            session.add(
                User(
                    blockhash=
                    "0x2969e88561fac17ca19c1749cb3e614211ba15c8e471be55de47d0b8ca6acf5f",
                    is_current=True,
                    updated_at=datetime.now(),
                    created_at=datetime.now(),
                    blocknumber=16914541,
                    handle="Urbanbankai",
                    handle_lc="urbanbankai",
                    user_id=42727,
                    primary_id=1,
                    secondary_ids=[2, 3],
                ))
            url = "https://audius.co/urbanbankai"
            resolved_url = resolve_url(session, url)

            # "DE677" is presumably the hash-id encoding of user_id 42727 —
            # verify against the id encoder if this assertion breaks.
            assert resolved_url == "/v1/users/DE677"
def populate_mock_db(db, test_tracks, date):
    """Populate the mock DB with one block + track per entry in test_tracks.

    Args:
        db: database object exposing scoped_session()
        test_tracks: list of dicts of per-track metadata overrides
        date: default created_at/updated_at for tracks without overrides
    """
    with db.scoped_session() as session:
        for i, track_meta in enumerate(test_tracks):
            blockhash = hex(i)
            block = Block(
                blockhash=blockhash,
                number=i,
                parenthash="0x01",
                is_current=(i == 0),  # only the first block is marked current
            )
            track = Track(
                blockhash=hex(i),
                blocknumber=i,
                track_id=i,
                is_current=track_meta.get("is_current", True),
                is_delete=track_meta.get("is_delete", False),
                owner_id=300,
                route_id="",
                track_segments=[],
                genre=track_meta.get("genre", ""),
                updated_at=track_meta.get("updated_at", date),
                created_at=track_meta.get("created_at", date),
                is_unlisted=track_meta.get("is_unlisted", False),
            )
            # add block and then flush before
            # adding track, bc track.blocknumber foreign key
            # references block
            session.add(block)
            session.flush()
            session.add(track)
def test_get_block_confirmation(web3_mock, redis_mock, db_mock):
    """Tests confirmation of block given a blockhash and a blocknumber"""

    # Set up db state: a chain of three blocks 0x00 -> 0x01 -> 0x02,
    # with only the last one marked current.
    blockhash, blocknumber = "0x01", 1
    latest_blockhash, latest_blocknumber = "0x02", 2
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash="0x00",
                number=0,
                parenthash=None,
                is_current=False,
            ))
        session.add(
            Block(
                blockhash=blockhash,
                number=blocknumber,
                parenthash="0x00",
                is_current=False,
            ))
        session.add(
            Block(
                blockhash=latest_blockhash,
                number=latest_blocknumber,
                parenthash=blockhash,
                is_current=True,
            ))

    # Known, already-indexed block: found and passed.
    block_confirmation = get_block_confirmation(blockhash, blocknumber)
    assert block_confirmation["block_found"] == True
    assert block_confirmation["block_passed"] == True

    # The current (latest) block: also found and passed.
    latest_block_confirmation = get_block_confirmation(latest_blockhash,
                                                       latest_blocknumber)
    assert latest_block_confirmation["block_found"] == True
    assert latest_block_confirmation["block_passed"] == True

    # Unknown hash at an already-indexed height: not found, but passed.
    new_block_confirmation = get_block_confirmation("0xfe", 2)
    assert new_block_confirmation["block_found"] == False
    assert new_block_confirmation["block_passed"] == True

    # Unknown hash beyond the latest indexed height: neither found nor passed.
    new_block_confirmation = get_block_confirmation("0xff", 3)
    assert new_block_confirmation["block_found"] == False
    assert new_block_confirmation["block_passed"] == False
def test_first_tip_challenge(app):
    """Dispatching a single send_tip event should complete the
    send-first-tip challenge for the tipping user."""
    redis_conn = redis.Redis.from_url(url=REDIS_URL)

    with app.app_context():
        db = get_db()

    block = Block(blockhash="0x1", number=BLOCK_NUMBER)
    user = User(
        blockhash="0x1",
        blocknumber=BLOCK_NUMBER,
        txhash="xyz",
        user_id=1,
        is_current=True,
        handle="TestHandle",
        handle_lc="testhandle",
        wallet="0x1",
        is_creator=False,
        is_verified=False,
        name="test_name",
        created_at=datetime.now(),
        updated_at=datetime.now(),
    )

    with db.scoped_session() as session:
        bus = ChallengeEventBus(redis_conn)
        # Activate the challenge for the purposes of this test.
        session.query(Challenge).filter(
            Challenge.id == "send-first-tip").update({
                "active":
                True,
                "starting_block":
                BLOCK_NUMBER
            })

        # Register events with the bus
        bus.register_listener(ChallengeEvent.send_tip,
                              send_first_tip_challenge_manager)

        # Flush the block before the user row that references it.
        session.add(block)
        session.flush()
        session.add(user)
        session.flush()

        bus.dispatch(
            ChallengeEvent.send_tip,
            BLOCK_NUMBER,
            1,  # user_id
            {},
        )

        # Flush the bus (drain dispatched events) then process them
        # against the current session.
        bus.flush()
        bus.process_events(session)
        session.flush()

        state = send_first_tip_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]

        assert state.is_complete
# Exemplo n.º 9
# 0
def test_get_health_with_monitors(web3_mock, redis_mock, db_mock,
                                  get_monitors_mock):
    """Tests that the health check returns monitor data"""
    # Every monitor value below should be passed through verbatim into the
    # health payload (asserted at the bottom).
    get_monitors_mock.return_value = {
        "database_connections": 2,
        "filesystem_size": 62725623808,
        "filesystem_used": 50381168640,
        "received_bytes_per_sec": 7942.038197103973,
        "total_memory": 6237151232,
        "used_memory": 3055149056,
        "transferred_bytes_per_sec": 7340.780857447676,
    }

    # Set up web3 eth: chain tip stubbed to block 2.
    def get_block(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 2
        block.hash = HexBytes(b"\x02")
        return block

    web3_mock.eth.get_block = get_block
    cache_play_health_vars(redis_mock)

    # Set up db state
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash="0x01",
                number=1,
                parenthash="0x01",
                is_current=True,
            ))

    args = {}
    health_results, error = get_health(args)
    assert error == False
    assert health_results["database_connections"] == 2
    assert health_results["filesystem_size"] == 62725623808
    assert health_results["filesystem_used"] == 50381168640
    assert health_results["received_bytes_per_sec"] == 7942.038197103973
    assert health_results["total_memory"] == 6237151232
    assert health_results["used_memory"] == 3055149056
    assert health_results["transferred_bytes_per_sec"] == 7340.780857447676
    assert health_results["number_of_cpus"] == os.cpu_count()
# Exemplo n.º 10
# 0
# NOTE(review): duplicate definition — a test with this exact name (using the
# newer web3 `get_block` attribute) is also defined earlier in this file. At
# import time, this later definition shadows the earlier one, so only one of
# them actually runs under pytest. Consider removing or renaming one.
def test_get_health_with_monitors(web3_mock, redis_mock, db_mock,
                                  get_monitors_mock):
    """Tests that the health check returns monitor data"""
    # Each monitor value below should be passed through verbatim into the
    # health payload.
    get_monitors_mock.return_value = {
        'database_connections': 2,
        'filesystem_size': 62725623808,
        'filesystem_used': 50381168640,
        'received_bytes_per_sec': 7942.038197103973,
        'total_memory': 6237151232,
        'used_memory': 3055149056,
        'transferred_bytes_per_sec': 7340.780857447676
    }

    # Set up web3 eth: chain tip stubbed to block 2 (legacy getBlock attr).
    def getBlock(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 2
        block.hash = HexBytes(b"\x02")
        return block

    web3_mock.eth.getBlock = getBlock

    # Set up db state
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash='0x01',
                number=1,
                parenthash='0x01',
                is_current=True,
            ))

    args = {}
    health_results, error = get_health(args)
    assert error == False
    assert health_results['database_connections'] == 2
    assert health_results['filesystem_size'] == 62725623808
    assert health_results['filesystem_used'] == 50381168640
    assert health_results['received_bytes_per_sec'] == 7942.038197103973
    assert health_results['total_memory'] == 6237151232
    assert health_results['used_memory'] == 3055149056
    assert health_results['transferred_bytes_per_sec'] == 7340.780857447676
    assert health_results['number_of_cpus'] == os.cpu_count()
# Exemplo n.º 11
# 0
def test_get_health_skip_redis(web3_mock, redis_mock, db_mock):
    """Tests that the health check skips the redis cache when explicitly disabled"""

    # Set up web3 eth: chain tip stubbed to block 2.
    def get_block(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 2
        block.hash = HexBytes(b"\x02")
        return block

    cache_play_health_vars(redis_mock)
    web3_mock.eth.get_block = get_block

    # Set up redis state — these cached values must be IGNORED because
    # use_redis_cache=False is passed below.
    redis_mock.set(latest_block_redis_key, "3")
    redis_mock.set(latest_block_hash_redis_key, "0x3")
    redis_mock.set(most_recent_indexed_block_redis_key, "2")
    redis_mock.set(most_recent_indexed_block_hash_redis_key, "0x02")

    # Set up db state
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash="0x01",
                number=1,
                parenthash="0x01",
                is_current=True,
            ))

    args = {}
    health_results, error = get_health(args, use_redis_cache=False)

    assert error == False

    # Results come from the web3 stub and db row, not the redis values.
    assert health_results["web"]["blocknumber"] == 2
    assert health_results["web"]["blockhash"] == "0x02"
    assert health_results["db"]["number"] == 1
    assert health_results["db"]["blockhash"] == "0x01"
    assert health_results["block_difference"] == 1

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
def test_get_health_using_redis(web3_mock, redis_mock, db_mock):
    """Tests that the health check returns redis data first"""

    # Stub the chain: web3 reports block 2 as the tip.
    def fake_get_block(_u1, _u2):  # args unused by the stub
        tip = MagicMock()
        tip.number = 2
        tip.hash = HexBytes(b"\x02")
        return tip

    web3_mock.eth.getBlock = fake_get_block

    # Prime redis with cached block state; the health check should prefer
    # these values over the web3 stub and the db row seeded below.
    cached_state = {
        latest_block_redis_key: '3',
        latest_block_hash_redis_key: '0x3',
        most_recent_indexed_block_redis_key: '2',
        most_recent_indexed_block_hash_redis_key: '0x02',
    }
    for cache_key, cache_value in cached_state.items():
        redis_mock.set(cache_key, cache_value)

    # Seed the db with a single current block.
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        seeded_block = Block(
            blockhash='0x01',
            number=1,
            parenthash='0x01',
            is_current=True,
        )
        session.add(seeded_block)

    health_results, error = get_health({})

    assert error == False

    # All reported block state comes from the redis cache, not web3/db.
    assert health_results["web"]["blocknumber"] == 3
    assert health_results["web"]["blockhash"] == "0x3"
    assert health_results["db"]["number"] == 2
    assert health_results["db"]["blockhash"] == "0x02"
    assert health_results["block_difference"] == 1

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
def setup_trending(db):
    """Seed the mock DB with the block, track, and play fixtures used by
    the trending tests."""
    # pylint: disable=W0621
    with db.scoped_session() as session:
        # seed tracks + blocks
        for idx, meta in enumerate(test_tracks):
            parent_block = Block(
                blockhash=hex(idx),
                number=idx,
                parenthash='0x01',
                is_current=True,
            )

            seeded_track = Track(
                blockhash=hex(idx),
                blocknumber=idx,
                track_id=idx,
                is_current=meta.get("is_current", True),
                is_delete=meta.get("is_delete", False),
                owner_id=300,
                route_id='',
                track_segments=[],
                genre=meta.get("genre", ""),
                updated_at=meta.get("updated_at", datetime.now()),
                created_at=meta.get("created_at", datetime.now()),
                is_unlisted=meta.get("is_unlisted", False),
            )

            # The block row must be flushed before the track row because
            # track.blocknumber is a foreign key into blocks.
            session.add(parent_block)
            session.flush()
            session.add(seeded_track)

        # seed plays
        for play_id, play_meta in enumerate(test_plays):
            session.add(
                Play(
                    id=play_id,
                    play_item_id=play_meta.get("item_id"),
                    created_at=play_meta.get("created_at", datetime.now()),
                ))
# Exemplo n.º 14
# 0
def test_get_health(web3_mock, redis_mock, db_mock):
    """Tests that the health check returns db data"""

    # Stub the chain: web3 reports block number 2 / hash 0x02 as the tip.
    def fake_get_block(_u1, _u2):  # args unused by the stub
        tip = MagicMock()
        tip.number = 2
        tip.hash = HexBytes(b"\x02")
        return tip

    cache_play_health_vars(redis_mock)
    web3_mock.eth.get_block = fake_get_block

    # Seed the db with a single current block at number 1.
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        current_block = Block(
            blockhash="0x01",
            number=1,
            parenthash="0x01",
            is_current=True,
        )
        session.add(current_block)

    health_results, error = get_health({})

    assert error == False

    # Web state comes from the web3 stub; db state from the seeded row.
    assert health_results["web"]["blocknumber"] == 2
    assert health_results["web"]["blockhash"] == "0x02"
    assert health_results["db"]["number"] == 1
    assert health_results["db"]["blockhash"] == "0x01"
    assert health_results["block_difference"] == 1
    assert health_results["plays"]["tx_info"]["slot_diff"] == 3

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
# Exemplo n.º 15
# 0
def test_get_health_unhealthy_block_difference(web3_mock, redis_mock, db_mock):
    """Tests that the health check reports an unhealthy block difference"""

    # Set up web3 eth: chain tip stubbed to block 50, far ahead of the db.
    def get_block(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 50
        block.hash = HexBytes(b"\x50")
        return block

    cache_play_health_vars(redis_mock)
    web3_mock.eth.get_block = get_block

    # Set up db state: only block 1 has been indexed.
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash="0x01",
                number=1,
                parenthash="0x01",
                is_current=True,
            ))

    # Difference of 49 exceeds the enforced healthy threshold of 40.
    args = {"enforce_block_diff": True, "healthy_block_diff": 40}
    health_results, error = get_health(args)

    assert error == True

    assert health_results["web"]["blocknumber"] == 50
    assert health_results["web"]["blockhash"] == "0x50"
    assert health_results["db"]["number"] == 1
    assert health_results["db"]["blockhash"] == "0x01"
    assert health_results["block_difference"] == 49

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
def test_get_health_with_invalid_db_state(web3_mock, redis_mock, db_mock):
    """Tests that the health check can handle an invalid block in the db"""

    # Set up web3 eth: chain tip stubbed to block 2.
    def getBlock(_u1, _u2):  # unused
        block = MagicMock()
        block.number = 2
        block.hash = HexBytes(b"\x02")
        return block

    web3_mock.eth.getBlock = getBlock

    # Set up db state: the current block deliberately has a NULL number.
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash='0x01',
                number=None,  # NoneType
                parenthash='0x01',
                is_current=True,
            ))

    args = {}
    health_results, error = get_health(args)

    assert error == False

    assert health_results["web"]["blocknumber"] == 2
    assert health_results["web"]["blockhash"] == "0x02"
    # The NULL db block number is reported as 0, so the difference is 2.
    assert health_results["db"]["number"] == 0
    assert health_results["db"]["blockhash"] == "0x01"
    assert health_results["block_difference"] == 2

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
# Exemplo n.º 17
# 0
def index_blocks(self, db, blocks_list):
    """Index a list of chain blocks into the database.

    The loop walks blocks_list from its last element to its first
    (presumably newest-first input, processed oldest-first — confirm with
    caller). Each block is handled in its own transaction: the previous
    current Block row is demoted, the new block inserted, every transaction
    in the block is bucketed by target factory contract, and the buckets are
    bulk-processed per entity type. After each block, redis is updated with
    the most recently indexed block number and hash.
    """
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    # Iterate indices from the end of the list back to the start.
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        block = blocks_list[i]
        block_index = num_blocks - i
        block_number = block.number
        block_timestamp = block.timestamp
        logger.info(
            f"index.py | index_blocks | {self.request.id} | block {block.number} - {block_index}/{num_blocks}"
        )

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(
                is_current=True)

            # Without this check we may end up duplicating an insert operation
            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update blocks table after
            assert (current_block_query.count() == 1
                    ), "Expected single row marked as current"

            # Demote the previous current block, then insert the new one.
            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            # One receipt bucket per factory contract type.
            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

            tx_receipt_dict = fetch_tx_receipts(self, block.transactions)

            # Sort transactions by hash
            sorted_txs = sorted(block.transactions,
                                key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                tx_receipt = tx_receipt_dict[tx_hash]

                # Handle user operations
                if tx_target_contract_address == contract_addresses[
                        "user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses[
                        "track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}")
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses[
                        "social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    social_feature_factory_txs.append(tx_receipt)

                # Handle repost operations
                if tx_target_contract_address == contract_addresses[
                        "playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses[
                        "user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}")
                    user_library_factory_txs.append(tx_receipt)

            # bulk process operations once all tx's for block have been parsed
            total_user_changes, user_ids = user_state_update(
                self, update_task, session, user_factory_txs, block_number,
                block_timestamp)
            user_state_changed = total_user_changes > 0

            total_track_changes, track_ids = track_state_update(
                self, update_task, session, track_factory_txs, block_number,
                block_timestamp)
            track_state_changed = total_track_changes > 0

            social_feature_state_changed = (  # pylint: disable=W0612
                social_feature_state_update(self, update_task, session,
                                            social_feature_factory_txs,
                                            block_number, block_timestamp) > 0)

            # Playlist state operations processed in bulk
            total_playlist_changes, playlist_ids = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number,
                block_timestamp)
            playlist_state_changed = total_playlist_changes > 0

            user_library_state_changed = user_library_state_update(  # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs,
                block_number, block_timestamp)

            # NOTE(review): the lexeme flag is set when either users or
            # tracks changed — presumably both feed the track lexeme cache.
            track_lexeme_state_changed = (user_state_changed
                                          or track_state_changed)
            session.commit()
            # After committing, invalidate caches for anything touched.
            if user_state_changed:
                if user_ids:
                    remove_cached_user_ids(redis, user_ids)
            if track_lexeme_state_changed:
                if track_ids:
                    remove_cached_track_ids(redis, track_ids)
            if playlist_state_changed:
                if playlist_ids:
                    remove_cached_playlist_ids(redis, playlist_ids)

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)
        redis.set(most_recent_indexed_block_hash_redis_key, block.hash.hex())

    if num_blocks > 0:
        logger.warning(
            f"index.py | index_blocks | Indexed {num_blocks} blocks")
def test_profile_completion_challenge_with_playlists(app):
    """Walk one user through every step of the profile-completion challenge.

    Performs each qualifying action in turn (name, repost, favorite,
    5 follows, profile picture, bio, cover photo), dispatching the matching
    challenge event after each one and asserting the step count advances.
    Also checks that removing data after completion does not un-complete
    the challenge.

    NOTE(review): BLOCK_NUMBER, REDIS_URL, get_db and
    profile_challenge_manager are module-level fixtures defined outside
    this block.
    """

    redis_conn = redis.Redis.from_url(url=REDIS_URL)

    # create user
    with app.app_context():
        db = get_db()

    block = Block(blockhash="0x1", number=BLOCK_NUMBER)
    user = User(
        blockhash="0x1",
        blocknumber=BLOCK_NUMBER,
        txhash="xyz",
        user_id=1,
        is_current=True,
        handle="TestHandle",
        handle_lc="testhandle",
        wallet="0x123",
        is_creator=False,
        is_verified=False,
        name="test_name",
        created_at=datetime.now(),
        updated_at=datetime.now(),
    )

    with db.scoped_session() as session:
        bus = ChallengeEventBus(redis_conn)

        # set challenge as active for purposes of test
        session.query(Challenge).filter(
            Challenge.id == "profile-completion").update({
                "active":
                True,
                "starting_block":
                BLOCK_NUMBER
            })

        # Register events with the bus
        bus.register_listener(ChallengeEvent.profile_update,
                              profile_challenge_manager)
        bus.register_listener(ChallengeEvent.repost, profile_challenge_manager)
        bus.register_listener(ChallengeEvent.follow, profile_challenge_manager)
        bus.register_listener(ChallengeEvent.favorite,
                              profile_challenge_manager)

        session.add(block)
        session.flush()
        session.add(user)

        # Process dummy event just to get this thing initted
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]

        # We should have completed a single step (name)
        assert state.current_step_count == 1 and not state.is_complete

        # Do a repost
        repost = Repost(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            user_id=1,
            repost_item_id=1,
            repost_type=RepostType.playlist,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
        )
        session.add(repost)
        session.flush()
        bus.dispatch(ChallengeEvent.repost, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 2 and not state.is_complete

        # Do a save
        save = Save(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.playlist,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
        )
        session.add(save)
        session.flush()
        bus.dispatch(ChallengeEvent.favorite, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        session.flush()
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 3 and not state.is_complete

        # Do 1 follow, then 5 total follows
        follow = Follow(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
            follower_user_id=1,
            followee_user_id=2,
        )
        session.add(follow)
        session.flush()
        bus.dispatch(ChallengeEvent.follow, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        session.flush()
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        # Assert 1 follow didn't do anything
        assert state.current_step_count == 3 and not state.is_complete
        # Four more follows bring the total to 5, which completes the
        # follow step of the challenge.
        follows = [
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=3,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=4,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=5,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=6,
            ),
        ]
        session.add_all(follows)
        session.flush()
        bus.dispatch(ChallengeEvent.follow, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 4 and not state.is_complete

        # profile_picture
        session.query(User).filter(User.user_id == 1).update(
            {"profile_picture": "profilepictureurl"})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 5 and not state.is_complete

        # profile description
        session.query(User).filter(User.user_id == 1).update(
            {"bio": "profiledescription"})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 6 and not state.is_complete

        # Undo it, ensure that our count goes down
        session.query(User).filter(User.user_id == 1).update({"bio": None})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 5 and not state.is_complete

        # profile_cover_photo
        # Restore the bio and add the final missing item (cover photo),
        # which should complete all 7 steps.
        session.query(User).filter(User.user_id == 1).update({
            "bio":
            "profiledescription",
            "cover_photo":
            "test_cover_photo"
        })
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 7 and state.is_complete == True

        # ensure that if we lose some data now that the thing is complete, we don't change the status of the challenge
        session.query(User).filter(User.user_id == 1).update(
            {"cover_photo": None})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(
            session, ["1"])[0]
        assert state.current_step_count == 7 and state.is_complete == True
def setup_extra_metadata_test(session):
    """Seed the DB for the extra-metadata challenge tests.

    Creates one block, one user, and two numeric challenges
    ("numeric_1" with no prior progress, "numeric_2" with an existing
    5-step UserChallenge row), then returns a ChallengeEventBus with a
    NumericCustomUpdater-backed manager registered for each challenge.
    """
    seed_blocks = [
        Block(blockhash="0x1", number=1, parenthash="", is_current=True)
    ]
    seed_users = [
        User(
            blockhash="0x1",
            blocknumber=1,
            user_id=1,
            is_current=True,
            wallet="0x38C68fF3926bf4E68289672F75ee1543117dD9B3",
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
    ]
    # Both numeric challenges share the same shape; only their ids differ.
    challenge_ids = ("numeric_1", "numeric_2")
    seed_challenges = [
        Challenge(
            id=challenge_id,
            type=ChallengeType.numeric,
            active=True,
            amount="5",
            step_count=5,
        ) for challenge_id in challenge_ids
    ]
    # Pre-existing (incomplete) progress for numeric_2 only.
    seed_progress = [
        UserChallenge(
            challenge_id="numeric_2",
            user_id=1,
            specifier="1",
            is_complete=False,
            current_step_count=5,
        ),
    ]

    # Clear out challenges left behind by migrations before seeding.
    session.query(Challenge).delete()
    session.commit()
    for batch in (seed_blocks, seed_users, seed_challenges, seed_progress):
        session.add_all(batch)
        session.commit()

    bus = ChallengeEventBus(redis.Redis.from_url(url=REDIS_URL))
    for challenge_id in challenge_ids:
        bus.register_listener(
            DEFAULT_EVENT,
            ChallengeManager(challenge_id, NumericCustomUpdater()),
        )
    return bus
Exemplo n.º 20
0
def test_user_indexing_skip_tx(bus_mock: mock.MagicMock, app, mocker):
    """Tests that users skip cursed txs without throwing an error and are able to process other tx in block"""
    with app.app_context():
        db = get_db()
        redis = get_redis()
        web3 = Web3()
        bus_mock(redis)
        update_task = DatabaseTask(
            cid_metadata_client=cid_metadata_client,
            web3=web3,
            challenge_event_bus=bus_mock,
            redis=redis,
        )

    class TestUserTransaction:
        # Minimal stand-in for a web3 tx receipt: only transactionHash
        # is read by the code under test.
        def __init__(self):
            self.transactionHash = None

    blessed_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3")
    blessed_tx = TestUserTransaction()
    blessed_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_tx_hash)
    cursed_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be")
    cursed_tx = TestUserTransaction()
    cursed_tx.transactionHash = update_task.web3.toBytes(hexstr=cursed_tx_hash)
    test_block_number = 25278765
    test_block_timestamp = 1
    # NOTE(review): block_hash is not defined in this function — presumably
    # a module-level fixture; confirm it exists at import time.
    test_block_hash = update_task.web3.toHex(block_hash)
    test_user_factory_txs = [cursed_tx, blessed_tx]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)
    # The "blessed" record should index cleanly; the "cursed" record carries
    # null fields intended to make indexing it fail and be skipped.
    blessed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_tx_hash,
        user_id=91232,
        name="tobey maguire",
        is_creator=False,
        is_current=True,
        updated_at=test_timestamp,
        created_at=test_timestamp,
    )
    cursed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_tx_hash,
        user_id=91238,
        name="birbs",
        is_current=None,
        is_creator=None,
        updated_at=test_timestamp,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.users.lookup_user_record",
        side_effect=[cursed_user_record, blessed_user_record],
        autospec=True,
    )
    # One list per event type per tx receipt; only the "update name" event
    # fires for each of the two receipts.
    mocker.patch(
        "src.tasks.users.get_user_events_tx",
        side_effect=[
            [],  # no user added events
            [],
            [
                {
                    "args":
                    AttrDict({
                        "_userId": cursed_user_record.user_id,
                        "_name": cursed_user_record.name.encode("utf-8"),
                    })
                },
            ],  # update name event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],  # second tx receipt
            [],
            [
                {
                    "args":
                    AttrDict({
                        "_userId": blessed_user_record.user_id,
                        "_name": blessed_user_record.name.encode("utf-8"),
                    })
                },
            ],  # update name event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        ],
        autospec=True,
    )
    test_ipfs_metadata: Dict[str, Any] = {}
    test_blacklisted_cids: Dict[str, Any] = {}

    # No try/except here: the original wrapped this in
    # `except Exception: assert False`, which swallowed the real traceback.
    # Letting an unexpected exception propagate still fails the test and
    # gives pytest the full diagnostic.
    with db.scoped_session() as session, bus_mock.use_scoped_dispatch_queue():
        current_block = Block(
            blockhash=test_block_hash,
            parenthash=test_block_hash,
            number=test_block_number,
            is_current=True,
        )
        session.add(current_block)
        (total_changes, updated_user_ids_set) = user_state_update(
            update_task,
            update_task,
            session,
            test_user_factory_txs,
            test_block_number,
            test_block_timestamp,
            block_hash,
            test_ipfs_metadata,
            test_blacklisted_cids,
        )
        # Only the blessed user was indexed.
        assert len(updated_user_ids_set) == 1
        assert list(updated_user_ids_set)[0] == blessed_user_record.user_id
        assert total_changes == 1
        # The cursed tx must be recorded as skipped at node level.
        assert (session.query(SkippedTransaction).filter(
            SkippedTransaction.txhash == cursed_user_record.txhash,
            SkippedTransaction.level == SkippedTransactionLevel.node,
        ).first())
        assert (session.query(User).filter(
            User.user_id == blessed_user_record.user_id).first())
        assert (session.query(User).filter(
            User.user_id == cursed_user_record.user_id).first()) is None
def setup_listen_streak_challenge(session):
    """Seed the DB for listen-streak challenge tests.

    Creates two users, the "listen-streak" challenge (7 steps), a partial
    UserChallenge row per user (5 steps), and one ListenStreakChallenge row
    per user — user 1 last listened 12h ago (streak alive), user 2 50h ago
    (streak broken). Returns a ChallengeEventBus with the listen-streak
    manager registered.
    """
    now = datetime.now()
    seed_blocks = [
        Block(blockhash="0x1", number=1, parenthash="", is_current=False),
        Block(blockhash="0x2", number=2, parenthash="", is_current=True),
    ]
    seed_users = [
        User(
            blockhash=f"0x{user_id}",
            blocknumber=user_id,
            user_id=user_id,
            is_current=True,
            wallet=f"0xFakeWallet{user_id}",
            created_at=now,
            updated_at=now,
            is_verified=verified,
        ) for user_id, verified in ((1, False), (2, True))
    ]

    seed_challenges = [
        Challenge(
            id="listen-streak",
            type=ChallengeType.numeric,
            active=True,
            amount="1",
            step_count=7,
        )
    ]

    # Both users are 5 steps into the 7-step streak.
    seed_progress = [
        UserChallenge(
            challenge_id="listen-streak",
            user_id=user_id,
            specifier=str(user_id),
            is_complete=False,
            current_step_count=5,
        ) for user_id in (1, 2)
    ]

    seed_streaks = [
        ListenStreakChallenge(
            user_id=1,
            last_listen_date=now - timedelta(hours=12),
            listen_streak=5,
        ),
        ListenStreakChallenge(
            user_id=2,
            last_listen_date=now - timedelta(hours=50),
            listen_streak=5,
        ),
    ]

    # Drop any challenges left over from migrations before seeding.
    session.query(Challenge).delete()
    session.commit()
    session.add_all(seed_blocks)
    session.commit()
    session.add_all(seed_users)
    session.add_all(seed_challenges)
    session.commit()
    session.add_all(seed_progress)
    session.commit()
    session.add_all(seed_streaks)
    session.commit()

    bus = ChallengeEventBus(redis.Redis.from_url(url=REDIS_URL))
    bus.register_listener(DEFAULT_EVENT, listen_streak_challenge_manager)
    return bus
Exemplo n.º 22
0
def index_blocks(self, db, blocks_list):
    """Index a list of chain blocks into the DB, newest-last.

    Iterates blocks_list in reverse, and for each block (in its own DB
    transaction): swaps the `is_current` flag onto a new Block row, buckets
    the block's transactions by target contract, bulk-processes each bucket
    via the *_state_update helpers, and refreshes the affected lexeme
    materialized views. After each block, writes the latest indexed block
    number to redis.

    NOTE(review): update_task, logger, contract_addresses and the
    *_state_update helpers are module-level names defined outside this block.
    """
    web3 = update_task.web3
    redis = update_task.redis

    num_blocks = len(blocks_list)
    # Process from the end of the list backwards (oldest block first,
    # assuming blocks_list is newest-first).
    block_order_range = range(len(blocks_list) - 1, -1, -1)
    for i in block_order_range:
        if i % 10 == 0 and i != 0:
            block_index = num_blocks - i
            logger.info(f"index.py | index_blocks | processing block {block_index}/{num_blocks} blocks")

        block = blocks_list[i]
        block_number = block.number
        block_timestamp = block.timestamp

        # Handle each block in a distinct transaction
        with db.scoped_session() as session:
            current_block_query = session.query(Block).filter_by(is_current=True)

            block_model = Block(
                blockhash=web3.toHex(block.hash),
                parenthash=web3.toHex(block.parentHash),
                number=block.number,
                is_current=True,
            )

            # Update blocks table after
            assert (
                current_block_query.count() == 1
            ), "Expected single row marked as current"

            # Move the is_current flag from the previous head to this block.
            former_current_block = current_block_query.first()
            former_current_block.is_current = False
            session.add(block_model)

            user_factory_txs = []
            track_factory_txs = []
            social_feature_factory_txs = []
            playlist_factory_txs = []
            user_library_factory_txs = []

            # Sort transactions by hash
            sorted_txs = sorted(block.transactions, key=lambda entry: entry['hash'])

            # Parse tx events in each block
            for tx in sorted_txs:
                tx_hash = web3.toHex(tx["hash"])
                tx_target_contract_address = tx["to"]
                tx_receipt = web3.eth.getTransactionReceipt(tx_hash)

                # Handle user operations
                if tx_target_contract_address == contract_addresses["user_factory"]:
                    logger.info(
                        f"index.py | index_blocks | UserFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}, adding to user_factory_txs to process in bulk"
                    )
                    user_factory_txs.append(tx_receipt)

                # Handle track operations
                if tx_target_contract_address == contract_addresses["track_factory"]:
                    logger.info(
                        f"index.py | index_blocks | TrackFactory contract addr: {tx_target_contract_address}"
                        f" tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    # Track state operations
                    track_factory_txs.append(tx_receipt)

                # Handle social operations
                if tx_target_contract_address == contract_addresses["social_feature_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Social feature contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    social_feature_factory_txs.append(tx_receipt)

                # Handle repost operations
                if tx_target_contract_address == contract_addresses["playlist_factory"]:
                    logger.info(
                        f"index.py | index_blocks | Playlist contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    playlist_factory_txs.append(tx_receipt)

                # Handle User Library operations
                if tx_target_contract_address == contract_addresses["user_library_factory"]:
                    logger.info(
                        f"index.py | index_blocks | User Library contract addr: {tx_target_contract_address}"
                        f"tx from block - {tx}, receipt - {tx_receipt}"
                    )
                    user_library_factory_txs.append(tx_receipt)

            # bulk process operations once all tx's for block have been parsed
            user_state_changed = (
                user_state_update(
                    self, update_task, session, user_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            track_state_changed = (
                track_state_update(
                    self, update_task, session, track_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            social_feature_state_changed = ( # pylint: disable=W0612
                social_feature_state_update(
                    self, update_task, session, social_feature_factory_txs, block_number, block_timestamp
                )
                > 0
            )

            # Playlist state operations processed in bulk
            # NOTE(review): unlike the user/track updates above, this keeps
            # the raw return value instead of comparing `> 0`; the refresh
            # step below relies on its truthiness — confirm
            # playlist_state_update returns a count here.
            playlist_state_changed = playlist_state_update(
                self, update_task, session, playlist_factory_txs, block_number, block_timestamp
            )

            user_library_state_changed = user_library_state_update( # pylint: disable=W0612
                self, update_task, session, user_library_factory_txs, block_number, block_timestamp
            )

            # keep search materialized view in sync with db
            # only refresh track_lexeme_dict when necessary
            # social state changes are not factored in since they don't affect track_lexeme_dict
            # write out all pending transactions to db before refreshing view
            track_lexeme_state_changed = (user_state_changed or track_state_changed)
            session.flush()
            if user_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict")
            if track_lexeme_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict")
            if playlist_state_changed:
                session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict")
                session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict")

        # add the block number of the most recently processed block to redis
        redis.set(most_recent_indexed_block_redis_key, block.number)

    if num_blocks > 0:
        logger.warning(f"index.py | index_blocks | Indexed {num_blocks} blocks")
def setup_db(session):
    """Seed the DB with one user plus a spread of challenge fixtures.

    Creates boolean, trending and aggregate challenges in various
    active/inactive and complete/incomplete combinations, plus one
    disbursement, and returns a ChallengeEventBus with a
    DefaultUpdater-backed manager registered for every challenge.
    """
    seed_blocks = [
        Block(blockhash="0x1", number=1, parenthash="", is_current=True)
    ]
    seed_users = [
        User(
            blockhash="0x1",
            blocknumber=1,
            user_id=1,
            is_current=True,
            wallet="0x38C68fF3926bf4E68289672F75ee1543117dD9B3",
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
    ]

    # boolean_challenge_1..4 are active; 5 and 6 are inactive
    # (6 has progress below — it must still be excluded when inactive).
    seed_challenges = [
        Challenge(
            id=f"boolean_challenge_{n}",
            type=ChallengeType.boolean,
            active=n <= 4,
            amount="5",
        ) for n in range(1, 7)
    ]
    seed_challenges.append(
        Challenge(
            id="trending_challenge_1",
            type=ChallengeType.trending,
            active=True,
            amount="5",
        ))
    seed_challenges.extend(
        Challenge(
            id=f"aggregate_challenge_{n}",
            type=ChallengeType.aggregate,
            active=True,
            amount="5",
            step_count=steps,
        ) for n, steps in ((1, 3), (2, 2), (3, 2)))
    seed_challenges.extend(
        Challenge(
            id=f"trending_{n}",
            type=ChallengeType.trending,
            active=True,
            amount="5",
        ) for n in (1, 2, 3))

    seed_user_challenges = [
        # Finished the first challenge, disbursed (see disbursements below)
        UserChallenge(
            challenge_id="boolean_challenge_1",
            user_id=1,
            specifier="1",
            is_complete=True,
        ),
        # Did finish the second challenge, did not disburse
        UserChallenge(
            challenge_id="boolean_challenge_2",
            user_id=1,
            specifier="1",
            is_complete=True,
        ),
        # Did not finish challenge 3
        UserChallenge(
            challenge_id="boolean_challenge_3",
            user_id=1,
            specifier="1",
            is_complete=False,
        ),
        # Inactive challenge
        UserChallenge(
            challenge_id="boolean_challenge_6",
            user_id=1,
            specifier="1",
            is_complete=True,
        ),
        # Compound specifiers, like if user1 invites user2
        UserChallenge(
            challenge_id="aggregate_challenge_1",
            user_id=1,
            specifier="1-2",
            is_complete=True,
        ),
        # Ensure that a non-complete user-challenge isn't counted towards
        # aggregate challenge score
        UserChallenge(
            challenge_id="aggregate_challenge_1",
            user_id=1,
            specifier="1-3",
            is_complete=False,
        ),
        UserChallenge(
            challenge_id="aggregate_challenge_2",
            user_id=1,
            specifier="1-2",
            is_complete=True,
        ),
        UserChallenge(
            challenge_id="aggregate_challenge_2",
            user_id=1,
            specifier="1-3",
            is_complete=True,
        ),
        # Trending 1 should be finished and included
        UserChallenge(
            challenge_id="trending_1",
            user_id=1,
            specifier="06-01-2020",
            is_complete=True,
        ),
        # Trending 2 should not be included
        UserChallenge(
            challenge_id="trending_2",
            user_id=1,
            specifier="06-01-2020",
            is_complete=False,
        ),
    ]
    seed_disbursements = [
        ChallengeDisbursement(
            challenge_id="boolean_challenge_1",
            user_id=1,
            amount="5",
            signature="1",
            slot=1,
            specifier="1",
        )
    ]

    # Wipe any existing challenges in the DB from running migrations, etc
    session.query(Challenge).delete()
    session.commit()
    for batch in (seed_blocks, seed_users, seed_challenges,
                  seed_user_challenges):
        session.add_all(batch)
        session.commit()
    # Disbursements are intentionally left uncommitted, matching the
    # original fixture behavior.
    session.add_all(seed_disbursements)

    bus = ChallengeEventBus(redis.Redis.from_url(url=REDIS_URL))
    # Register a default manager for every challenge created above.
    for challenge in seed_challenges:
        bus.register_listener(
            DEFAULT_EVENT,
            ChallengeManager(challenge.id, DefaultUpdater()),
        )
    return bus
Exemplo n.º 24
0
def setup_search(app_module):
    """Seed search test data (blocks/tracks/users/follows/playlists/saves),
    refresh the search lexeme materialized views, then run the elasticsearch
    indexer and wait for it to report "catchup done".

    Raises if the indexer process exits early or never reports catchup.
    """
    with app_module.app_context():
        db = get_db()

    # Import app so that it'll run migrations against the db
    now = datetime.now()
    blocks = [
        Block(
            blockhash=hex(1),
            number=1,
            parenthash="0x01",
            is_current=False,
        ),
        Block(
            blockhash=hex(2),
            number=2,
            parenthash="0x02",
            is_current=False,
        ),
        Block(
            blockhash=hex(3),
            number=3,
            parenthash="0x03",
            is_current=True,
        ),
    ]
    tracks = [
        Track(
            blockhash=hex(1),
            blocknumber=1,
            track_id=1,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="the track 1",
            download={"cid": None, "is_downloadable": False, "requires_follow": False},
        ),
        Track(
            blockhash=hex(2),
            blocknumber=2,
            track_id=2,
            is_current=True,
            is_delete=False,
            owner_id=2,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="the track 2",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
        Track(
            blockhash=hex(3),
            blocknumber=3,
            track_id=3,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="xyz",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
    ]

    # need users for the lexeme dict to work
    users = [
        User(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            is_current=True,
            handle="",
            wallet="",
            name="user 1",
            updated_at=now,
            created_at=now,
        ),
        User(
            blockhash=hex(2),
            blocknumber=2,
            user_id=2,
            is_current=True,
            handle="",
            name="user 2",
            wallet="",
            updated_at=now,
            created_at=now,
        ),
        User(
            blockhash=hex(3),
            blocknumber=3,
            user_id=3,
            is_current=True,
            handle="",
            wallet="",
            name="fdwea",
            updated_at=now,
            created_at=now,
        ),
    ]

    follows = [
        Follow(
            blockhash=hex(1),
            blocknumber=1,
            follower_user_id=2,
            followee_user_id=1,
            is_current=True,
            is_delete=False,
            created_at=now,
        )
    ]

    playlists = [
        Playlist(
            blockhash=hex(1),
            blocknumber=1,
            playlist_id=1,
            playlist_owner_id=1,
            is_album=False,
            is_private=False,
            playlist_name="playlist 1",
            playlist_contents={"track_ids": [{"track": 1, "time": 1}]},
            is_current=True,
            is_delete=False,
            updated_at=now,
            created_at=now,
        ),
        Playlist(
            blockhash=hex(2),
            blocknumber=2,
            playlist_id=2,
            playlist_owner_id=2,
            is_album=True,
            is_private=False,
            playlist_name="album 1",
            playlist_contents={"track_ids": [{"track": 2, "time": 2}]},
            is_current=True,
            is_delete=False,
            updated_at=now,
            created_at=now,
        ),
    ]

    saves = [
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.track,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.playlist,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=2,
            save_type=SaveType.album,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
    ]

    balances = [
        UserBalance(
            user_id=1,
            balance=0,
            associated_wallets_balance=0,
            associated_sol_wallets_balance=0,
            waudio=0,
        )
    ]

    with db.scoped_session() as session:
        # Flush after each block so dependent rows (tracks/users reference
        # blocknumber) satisfy their foreign keys.
        for block in blocks:
            session.add(block)
            session.flush()
        for track in tracks:
            session.add(track)
        for user in users:
            session.add(user)
            session.flush()
        for follow in follows:
            session.add(follow)
            session.flush()
        for playlist in playlists:
            session.add(playlist)
            session.flush()
        for save in saves:
            session.add(save)
            session.flush()
        for balance in balances:
            session.add(balance)
            session.flush()

        # Refresh the lexeme matview
        _update_aggregate_track(session)
        session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict;")

        session.execute(
            UPDATE_AGGREGATE_USER_QUERY, {"prev_indexed_aggregate_block": 0}
        )
        session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict;")

        session.execute("REFRESH MATERIALIZED VIEW aggregate_playlist;")
        session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict;")
        session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict;")

    try:
        # The indexer runs until killed; hitting the timeout below is the
        # expected (successful) exit path of this call.
        output = subprocess.run(
            ["npm", "run", "dev"],
            env=os.environ,
            capture_output=True,
            text=True,
            cwd="es-indexer",
            timeout=5,
        )
        # run() returning means the indexer process exited early — a failure.
        raise Exception(
            f"Elasticsearch indexing stopped: {output.stderr}. With env: {os.environ}"
        )
    except subprocess.TimeoutExpired as timeout:
        # Fix: TimeoutExpired.output may be None (nothing captured) or bytes
        # (some Python versions do not honor text=True on the timeout path),
        # so the previous unconditional `.decode("utf-8")` could raise
        # AttributeError on str or None. Normalize before checking.
        captured = timeout.output or b""
        if isinstance(captured, bytes):
            captured = captured.decode("utf-8")
        if "catchup done" not in captured:
            raise Exception("Elasticsearch failed to index")
def test_playlist_indexing_skip_tx(app, mocker):
    """Tests that playlists skip cursed txs without throwing an error and are able to process other tx in block"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3,
                                 challenge_event_bus)

    # Minimal stand-in for a web3 transaction object; only transactionHash
    # is read by the indexer.
    class TestPlaylistTransaction:
        pass

    blessed_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3")
    blessed_tx = TestPlaylistTransaction()
    blessed_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_tx_hash)
    cursed_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be")
    cursed_tx = TestPlaylistTransaction()
    cursed_tx.transactionHash = update_task.web3.toBytes(hexstr=cursed_tx_hash)
    test_block_number = 25278765
    test_block_timestamp = 1
    # NOTE(review): block_hash is not defined in this function — presumably a
    # module-level constant; verify it exists at import time.
    test_block_hash = update_task.web3.toHex(block_hash)
    test_playlist_factory_txs = [cursed_tx, blessed_tx]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)
    blessed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_tx_hash,
        playlist_id=91232,
        is_album=False,
        is_private=False,
        playlist_name="test",
        playlist_contents={},
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description="testing!",
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=test_timestamp,
        playlist_owner_id=1,
    )
    # The "cursed" record is intentionally malformed (None in non-nullable
    # columns) so that indexing it fails and the tx is skipped.
    cursed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_tx_hash,
        playlist_id=91238,
        is_album=None,
        is_private=None,
        playlist_name=None,
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description=None,
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.playlists.lookup_playlist_record",
        side_effect=[cursed_playlist_record, blessed_playlist_record],
        autospec=True,
    )
    # One list entry per (tx receipt, event type) lookup; only the playlist
    # deleted events carry payloads here.
    mocker.patch(
        "src.tasks.playlists.get_playlist_events_tx",
        side_effect=[
            [],  # no playlist created events
            [
                {
                    "args":
                    AttrDict({
                        "_playlistId": cursed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],  # second tx receipt
            [
                {
                    "args":
                    AttrDict({
                        "_playlistId":
                        blessed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        ],
        autospec=True,
    )
    test_ipfs_metadata = {}
    test_blacklisted_cids = {}

    with db.scoped_session() as session:
        # Fix: the body below was previously wrapped in
        # `try: ... except Exception: assert False`, which swallowed the real
        # traceback and reported every failure as a bare AssertionError.
        # Let exceptions propagate so pytest shows the actual error.
        current_block = Block(
            blockhash=test_block_hash,
            parenthash=test_block_hash,
            number=test_block_number,
            is_current=True,
        )
        session.add(current_block)
        (total_changes, updated_playlist_ids_set) = playlist_state_update(
            update_task,
            update_task,
            session,
            test_playlist_factory_txs,
            test_block_number,
            test_block_timestamp,
            block_hash,
            test_ipfs_metadata,
            test_blacklisted_cids,
        )
        # Only the blessed playlist should have been indexed.
        assert len(updated_playlist_ids_set) == 1
        assert (list(updated_playlist_ids_set)[0] ==
                blessed_playlist_record.playlist_id)
        assert total_changes == 1
        # The cursed tx must be recorded as skipped at node level.
        assert (session.query(SkippedTransaction).filter(
            SkippedTransaction.txhash == cursed_playlist_record.txhash,
            SkippedTransaction.level == SkippedTransactionLevel.node,
        ).first())
        assert (session.query(Playlist).filter(
            Playlist.playlist_id ==
            blessed_playlist_record.playlist_id).first())
        assert (session.query(Playlist).filter(
            Playlist.playlist_id ==
            cursed_playlist_record.playlist_id).first()) is None
Exemplo n.º 26
0
def setup_search(db):
    """Populate blocks, tracks, and users, then refresh the track search views."""
    # Import app so that it'll run migrations against the db
    timestamp = datetime.now()

    blocks = [
        Block(
            blockhash=hex(1),
            number=1,
            parenthash='0x01',
            is_current=False,
        ),
        Block(
            blockhash=hex(2),
            number=2,
            parenthash='0x02',
            is_current=False,
        ),
        Block(
            blockhash=hex(3),
            number=3,
            parenthash='0x03',
            is_current=True,
        ),
    ]

    tracks = [
        Track(
            blockhash=hex(1),
            blocknumber=1,
            track_id=1,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id='',
            track_segments=[],
            genre="",
            updated_at=timestamp,
            created_at=timestamp,
            is_unlisted=False,
            title="the track 1",
            download={"cid": None, "is_downloadable": False, "requires_follow": False},
        ),
        Track(
            blockhash=hex(2),
            blocknumber=2,
            track_id=2,
            is_current=True,
            is_delete=False,
            owner_id=2,
            route_id='',
            track_segments=[],
            genre="",
            updated_at=timestamp,
            created_at=timestamp,
            is_unlisted=False,
            title="the track 2",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
        Track(
            blockhash=hex(3),
            blocknumber=3,
            track_id=3,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id='',
            track_segments=[],
            genre="",
            updated_at=timestamp,
            created_at=timestamp,
            is_unlisted=False,
            title="xyz",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
    ]

    # need users for the lexeme dict to work
    users = [
        User(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            is_current=True,
            handle="",
            wallet="",
            updated_at=timestamp,
            created_at=timestamp,
        ),
        User(
            blockhash=hex(2),
            blocknumber=2,
            user_id=2,
            is_current=True,
            handle="",
            wallet="",
            updated_at=timestamp,
            created_at=timestamp,
        ),
        User(
            blockhash=hex(3),
            blocknumber=3,
            user_id=3,
            is_current=True,
            handle="",
            wallet="",
            updated_at=timestamp,
            created_at=timestamp,
        ),
    ]

    with db.scoped_session() as session:
        # Blocks must be flushed before tracks/users reference their numbers.
        for blk in blocks:
            session.add(blk)
            session.flush()
        for trk in tracks:
            session.add(trk)
        for usr in users:
            session.add(usr)
            session.flush()

        # Refresh the lexeme matview
        session.execute("REFRESH MATERIALIZED VIEW aggregate_track;")
        session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict;")
def test_user_replica_set_indexing_skip_tx(app, mocker):
    """Tests that URSM indexing skips cursed txs without throwing an error and are able to process other tx in block"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3, challenge_event_bus)

    # Minimal stand-in for a web3 transaction object; only transactionHash
    # is read by the indexer.
    class TestUserReplicaSetTransaction:
        pass

    blessed_user_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3"
    )
    blessed_user_tx = TestUserReplicaSetTransaction()
    blessed_user_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_user_tx_hash
    )
    cursed_user_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be"
    )
    cursed_user_tx = TestUserReplicaSetTransaction()
    cursed_user_tx.transactionHash = update_task.web3.toBytes(
        hexstr=cursed_user_tx_hash
    )
    blessed_cnode_tx_hash = (
        "0x42c66d0542383f06e22ef6a235ddba238050d85562bcbd18667c9711c1daee72"
    )
    blessed_cnode_tx = TestUserReplicaSetTransaction()
    blessed_cnode_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_cnode_tx_hash
    )
    cursed_cnode_tx_hash = (
        "0xa022761e229302abc2490f8bdc7ec0e642916b0f5cbc2908ccd49498243c1806"
    )
    cursed_cnode_tx = TestUserReplicaSetTransaction()
    cursed_cnode_tx.transactionHash = update_task.web3.toBytes(
        hexstr=cursed_cnode_tx_hash
    )
    test_block_number = 25278765
    test_block_timestamp = 1
    # NOTE(review): block_hash is not defined in this function — presumably a
    # module-level constant; verify it exists at import time.
    test_block_hash = update_task.web3.toHex(block_hash)
    test_user_replica_set_mgr_txs = [
        cursed_user_tx,
        blessed_user_tx,
        cursed_cnode_tx,
        blessed_cnode_tx,
    ]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)
    test_wallet = "0x0birbchickemcatlet"
    blessed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_user_tx_hash,
        user_id=1,
        name="tobey maguire",
        is_creator=False,
        is_current=True,
        updated_at=test_timestamp,
        created_at=test_timestamp,
    )
    # "Cursed" records are intentionally malformed (None in non-nullable
    # columns) so that indexing them fails and the txs are skipped.
    cursed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_user_tx_hash,
        user_id=2,
        name="birb",
        is_current=None,
        is_creator=None,
        updated_at=test_timestamp,
        created_at=None,
    )
    blessed_content_node_record = URSMContentNode(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_cnode_tx_hash,
        is_current=True,
        cnode_sp_id=1,
        delegate_owner_wallet=test_wallet,
        owner_wallet=test_wallet,
        created_at=test_timestamp,
    )
    cursed_content_node_record = URSMContentNode(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_cnode_tx_hash,
        is_current=None,
        cnode_sp_id=2,
        delegate_owner_wallet=test_wallet,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.user_replica_set.lookup_user_record",
        side_effect=[cursed_user_record, blessed_user_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.lookup_ursm_cnode",
        side_effect=[cursed_content_node_record, blessed_content_node_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.get_endpoint_string_from_sp_ids",
        return_value="http://localhost:4001,http://localhost:4002,",
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.get_ursm_cnode_endpoint",
        return_value="http://localhost:4001,http://localhost:4002,",
        autospec=True,
    )
    mocker.patch(
        # because we do not have the real contract set up in the test
        # we mock the return value of this fn w events parsed from an imaginary tx
        "src.tasks.user_replica_set.get_user_replica_set_mgr_tx",
        side_effect=[
            [
                {
                    "args": AttrDict(
                        {
                            "_userId": cursed_user_record.user_id,
                            "_primaryId": 1,
                            "_secondaryIds": [2, 3, 4],
                            "_signer": "mobey taguire",
                        }
                    )
                },
            ],  # first tx receipt - update replica set
            [],  # first tx receipt - update content node
            [
                {
                    "args": AttrDict(
                        {
                            "_userId": blessed_user_record.user_id,
                            "_primaryId": 1,
                            "_secondaryIds": [2, 3, 4],
                            "_signer": "dirsten kunst",
                        }
                    )
                },
            ],  # second tx receipt - update replica set
            [],  # second tx receipt - update content node
            [],  # third tx receipt - update replica set
            [
                {
                    "args": AttrDict(
                        {
                            "_cnodeSpId": cursed_content_node_record.cnode_sp_id,
                            "_cnodeDelegateOwnerWallet": test_wallet,
                            "_cnodeOwnerWallet": test_wallet,
                            "_proposer1DelegateOwnerWallet": test_wallet,
                            "_proposer2DelegateOwnerWallet": test_wallet,
                            "_proposer3DelegateOwnerWallet": test_wallet,
                            "_proposerSpIds": [1, 2],
                        }
                    )
                },
            ],  # third tx receipt - update content node
            [],  # fourth tx receipt - update replica set
            [
                {
                    "args": AttrDict(
                        {
                            "_cnodeSpId": blessed_content_node_record.cnode_sp_id,
                            "_cnodeDelegateOwnerWallet": test_wallet,
                            "_cnodeOwnerWallet": test_wallet,
                            "_proposer1DelegateOwnerWallet": test_wallet,
                            "_proposer2DelegateOwnerWallet": test_wallet,
                            "_proposer3DelegateOwnerWallet": test_wallet,
                            "_proposerSpIds": [1, 2],
                        }
                    )
                },
            ],  # fourth tx receipt - update content node
        ],
        autospec=True,
    )

    test_ipfs_metadata = {}
    test_blacklisted_cids = {}

    with db.scoped_session() as session:
        # Fix: the body below was previously wrapped in
        # `try: ... except Exception: assert False`, which swallowed the real
        # traceback and reported every failure as a bare AssertionError.
        # Let exceptions propagate so pytest shows the actual error.
        current_block = Block(
            blockhash=test_block_hash,
            parenthash=test_block_hash,
            number=test_block_number,
            is_current=True,
        )
        session.add(current_block)
        (total_changes, updated_user_ids_set) = user_replica_set_state_update(
            update_task,
            update_task,
            session,
            test_user_replica_set_mgr_txs,
            test_block_number,
            test_block_timestamp,
            block_hash,
            test_ipfs_metadata,
            test_blacklisted_cids,
        )
        # Only the blessed user should have been indexed.
        assert len(updated_user_ids_set) == 1
        assert list(updated_user_ids_set)[0] == blessed_user_record.user_id
        assert total_changes == 2
        # Both cursed txs must be recorded as skipped at node level.
        assert (
            session.query(SkippedTransaction)
            .filter(
                SkippedTransaction.txhash == cursed_cnode_tx_hash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            )
            .first()
        )
        assert (
            session.query(SkippedTransaction)
            .filter(
                SkippedTransaction.txhash == cursed_user_tx_hash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            )
            .first()
        )
        assert (
            session.query(User)
            .filter(User.user_id == blessed_user_record.user_id)
            .first()
        )
        assert (
            session.query(User)
            .filter(User.user_id == cursed_user_record.user_id)
            .first()
        ) is None
        assert (
            session.query(URSMContentNode)
            .filter(
                URSMContentNode.cnode_sp_id
                == blessed_content_node_record.cnode_sp_id
            )
            .first()
        )
        assert (
            session.query(URSMContentNode)
            .filter(
                URSMContentNode.cnode_sp_id
                == cursed_content_node_record.cnode_sp_id
            )
            .first()
        ) is None
Exemplo n.º 28
0
def test_get_health_verbose(web3_mock, redis_mock, db_mock, get_monitors_mock):
    """Tests that the health check returns verbose db stats"""
    connection_info = [
        {
            'datname': 'audius_discovery',
            'state': 'idle',
            'query': 'COMMIT',
            'wait_event_type': 'Client',
            'wait_event': 'ClientRead',
        }
    ]
    get_monitors_mock.return_value = {
        'database_connections': 2,
        'database_connection_info': connection_info,
    }

    # Stub out web3's block fetch with a fixed chain head at block 2.
    def _mock_get_block(_u1, _u2):  # unused
        mock_block = MagicMock()
        mock_block.number = 2
        mock_block.hash = HexBytes(b"\x02")
        return mock_block

    web3_mock.eth.getBlock = _mock_get_block

    # Seed the db with a single current block at height 1.
    with db_mock.scoped_session() as session:
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(blockhash='0x01', number=1, parenthash='0x01', is_current=True)
        )

    health_results, error = get_health({"verbose": True})

    assert error == False

    web_stats = health_results["web"]
    db_stats = health_results["db"]
    assert web_stats["blocknumber"] == 2
    assert web_stats["blockhash"] == "0x02"
    assert db_stats["number"] == 1
    assert db_stats["blockhash"] == "0x01"
    assert health_results["block_difference"] == 1

    db_conn_stats = health_results["db_connections"]
    assert db_conn_stats["database_connections"] == 2
    assert db_conn_stats["database_connection_info"] == connection_info

    for expected_key in ("maximum_healthy_block_difference", "version", "service"):
        assert expected_key in health_results
def test_get_health_verbose(web3_mock, redis_mock, db_mock):
    """Tests that the health check returns verbose db stats"""

    # Set up web3 eth
    def getBlock(_u1, _u2):  # unused
        # Fixed chain head at block 2 so block_difference below is 2 - 1 = 1.
        block = MagicMock()
        block.number = 2
        block.hash = HexBytes(b"\x02")
        return block

    web3_mock.eth.getBlock = getBlock

    # Set up db state
    with db_mock.scoped_session() as session:
        # Set up mock PG stats
        # NOTE(review): these raw statements recreate pg_stat_database /
        # pg_stat_activity as ordinary tables — assumes db_mock is backed by a
        # non-Postgres engine (e.g. sqlite) where that is permitted; confirm.
        session.execute('CREATE TABLE pg_stat_database (numbackends INTEGER)')
        session.execute(
            'INSERT INTO pg_stat_database (numbackends) VALUES (1)')
        session.execute(
            'INSERT INTO pg_stat_database (numbackends) VALUES (1)')

        # NOTE(review): STRING column types and double-quoted string literals
        # in the INSERT below are non-standard SQL; sqlite accepts both, but
        # this would fail on Postgres — confirm the mock engine's dialect.
        session.execute("""
            CREATE TABLE pg_stat_activity (
                datname STRING,
                query STRING,
                state STRING,
                wait_event STRING,
                wait_event_type STRING
            )
        """)
        session.execute("""
            INSERT INTO pg_stat_activity (datname, query, state, wait_event, wait_event_type)
            VALUES ("audius_discovery", "COMMIT", "idle", "ClientRead", "Client")
        """)

        # Single current block at height 1 for the db half of the check.
        Block.__table__.create(db_mock._engine)
        session.add(
            Block(
                blockhash='0x01',
                number=1,
                parenthash='0x01',
                is_current=True,
            ))

    args = {"verbose": True}
    health_results, error = get_health(args)

    assert error == False

    assert health_results["web"]["blocknumber"] == 2
    assert health_results["web"]["blockhash"] == "0x02"
    assert health_results["db"]["number"] == 1
    assert health_results["db"]["blockhash"] == "0x01"
    assert health_results["block_difference"] == 1

    # Connection stats should reflect the two pg_stat_database rows and the
    # single pg_stat_activity row inserted above.
    assert health_results["db_connections"]["open_connections"] == 2
    assert health_results["db_connections"]["connection_info"] == [{
        'datname':
        'audius_discovery',
        'state':
        'idle',
        'query':
        'COMMIT',
        'wait_event_type':
        'Client',
        'wait_event':
        'ClientRead'
    }]

    assert "maximum_healthy_block_difference" in health_results
    assert "version" in health_results
    assert "service" in health_results
def setup_trending(db, date):
    """Seed blocks, tracks, plays, and aggregate play counts for trending tests."""
    # Test data

    # test tracks
    # when creating tracks, track_id == index
    test_tracks = [
        {"genre": "Electronic"},
        {"genre": "Pop"},
        {"genre": "Electronic"},
        # Tracks we don't want to count
        {"genre": "Electronic", "is_unlisted": True},
        {"genre": "Electronic", "is_delete": True},
    ]

    test_plays = [
        # Current Plays
        {"item_id": 0},
        {"item_id": 0},
        {"item_id": 1},
        {"item_id": 1},
        {"item_id": 2},
        {"item_id": 3},
        # > 1 wk plays
        {"item_id": 2, "created_at": date - timedelta(weeks=2)},
        {"item_id": 2, "created_at": date - timedelta(weeks=2)},
        {"item_id": 3, "created_at": date - timedelta(weeks=2)},
        # We don't want to count these guys (tracks deleted/unlisted)
        {"item_id": 3},
        {"item_id": 3},
        {"item_id": 4},
        {"item_id": 4},
    ]

    # pylint: disable=W0621
    with db.scoped_session() as session:
        # seed tracks + blocks; flush each block before its track because
        # track.blocknumber is a foreign key into blocks
        for track_index, track_meta in enumerate(test_tracks):
            track_blockhash = hex(track_index)
            session.add(
                Block(
                    blockhash=track_blockhash,
                    number=track_index,
                    parenthash="0x01",
                    is_current=True,
                )
            )
            session.flush()
            session.add(
                Track(
                    blockhash=track_blockhash,
                    blocknumber=track_index,
                    track_id=track_index,
                    is_current=track_meta.get("is_current", True),
                    is_delete=track_meta.get("is_delete", False),
                    owner_id=300,
                    route_id="",
                    track_segments=[],
                    genre=track_meta.get("genre", ""),
                    updated_at=track_meta.get("updated_at", date),
                    created_at=track_meta.get("created_at", date),
                    is_unlisted=track_meta.get("is_unlisted", False),
                )
            )

        # seed plays, tallying per-track totals as we go
        play_totals = {}
        for play_id, play_meta in enumerate(test_plays):
            played_item_id = play_meta.get("item_id")
            play_totals[played_item_id] = play_totals.get(played_item_id, 0) + 1
            session.add(
                Play(
                    id=play_id,
                    play_item_id=played_item_id,
                    created_at=play_meta.get("created_at", date),
                )
            )
        for played_item_id, total in play_totals.items():
            session.add(AggregatePlays(play_item_id=played_item_id, count=total))
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Extracts data from the first block in a .dat file.

Usage: parse_block.py [path to .dat file]
"""

from src.models import Block
from sys import argv

MAX_BLOCK_SIZE = 1024 ** 2  # 1 MB

file_path = argv[1]
with open(file_path) as f:
    block = Block.from_data(f.read(MAX_BLOCK_SIZE))

print "Hash:", block.hash()[::-1].encode("hex")
print "Magic number: {:x}".format(block.magic_number)
print "Block size:", block.block_size
print "Version:", block.version
print "Hash of previous block:", block.hash_prev_block[::-1].encode("hex")
print "Hash of merkle root:", block.hash_merkle_root[::-1].encode("hex")
print "Time:", block.time
print "Target: {:064x} ({})".format(block.target, block.bits.encode("hex"))
print "Difficulty:", block.difficulty
print "Pool difficulty:", block.pool_difficulty
print "Nonce:", block.nonce
print "Number of transactions:", block.tx_count