예제 #1
0
def parse_user_event(
        self, user_contract, update_task, session, tx_receipt, block_number, entry, event_type, user_record,
        block_timestamp):
    """Apply a single user-contract event to ``user_record`` and return it.

    Dispatches on ``event_type`` to update the matching column(s), stamps
    ``updated_at`` from ``block_timestamp``, applies any IPFS metadata
    overrides, and relocates photo CIDs into the ``*_sizes`` columns.
    Returns None (skipping the update) when a blacklisted CID is seen.
    """
    args = entry["args"]

    # Event-type specific column updates
    if event_type == user_event_types_lookup["add_user"]:
        handle_str = helpers.bytes32_to_str(args._handle)
        user_record.handle = handle_str
        user_record.handle_lc = handle_str.lower()
        user_record.wallet = args._wallet.lower()
    elif event_type == user_event_types_lookup["update_multihash"]:
        metadata_multihash = helpers.multihash_digest_to_cid(args._multihashDigest)
        # A blacklisted CID aborts the whole update
        if is_blacklisted_ipld(session, metadata_multihash):
            logger.info(f"Encountered blacklisted CID {metadata_multihash} in indexing update user metadata multihash")
            return None
        user_record.metadata_multihash = metadata_multihash
    elif event_type == user_event_types_lookup["update_name"]:
        user_record.name = helpers.bytes32_to_str(args._name)
    elif event_type == user_event_types_lookup["update_location"]:
        user_record.location = helpers.bytes32_to_str(args._location)
    elif event_type == user_event_types_lookup["update_bio"]:
        user_record.bio = args._bio
    elif event_type == user_event_types_lookup["update_profile_photo"]:
        profile_photo_multihash = helpers.multihash_digest_to_cid(args._profilePhotoDigest)
        # A blacklisted CID aborts the whole update
        if is_blacklisted_ipld(session, profile_photo_multihash):
            logger.info(f"Encountered blacklisted CID {profile_photo_multihash} in indexing update user profile photo")
            return None
        user_record.profile_picture = profile_photo_multihash
    elif event_type == user_event_types_lookup["update_cover_photo"]:
        cover_photo_multihash = helpers.multihash_digest_to_cid(args._coverPhotoDigest)
        # A blacklisted CID aborts the whole update
        if is_blacklisted_ipld(session, cover_photo_multihash):
            logger.info(f"Encountered blacklisted CID {cover_photo_multihash} in indexing update user cover photo")
            return None
        user_record.cover_photo = cover_photo_multihash
    elif event_type == user_event_types_lookup["update_is_creator"]:
        user_record.is_creator = args._isCreator
    elif event_type == user_event_types_lookup["update_is_verified"]:
        user_record.is_verified = args._isVerified
    elif event_type == user_event_types_lookup["update_creator_node_endpoint"]:
        user_record.creator_node_endpoint = args._creatorNodeEndpoint

    # Every handled event refreshes the updated_at timestamp
    user_record.updated_at = datetime.utcfromtimestamp(block_timestamp)

    # If creator, look up metadata multihash in IPFS and override with metadata fields
    metadata_overrides = get_metadata_overrides_from_ipfs(
        session, update_task, user_record
    )

    if metadata_overrides:
        # Keys are defined in get_metadata_overrides_from_ipfs.
        # Ordered so the *_sizes variants overwrite the plain variants
        # (both sizes keys intentionally land on the plain columns; the
        # relocation below then moves them into the *_sizes columns).
        override_targets = (
            ("profile_picture", "profile_picture"),
            ("profile_picture_sizes", "profile_picture"),
            ("cover_photo", "cover_photo"),
            ("cover_photo_sizes", "cover_photo"),
            ("bio", "bio"),
            ("name", "name"),
            ("location", "location"),
        )
        for key, attr in override_targets:
            if metadata_overrides[key]:
                setattr(user_record, attr, metadata_overrides[key])

    # All incoming profile photos intended to be a directory:
    # any write to profile_picture is relocated to profile_picture_sizes.
    if user_record.profile_picture:
        logger.info(f"users.py | Processing user profile_picture {user_record.profile_picture}")
        user_record.profile_picture_sizes = user_record.profile_picture
        user_record.profile_picture = None

    # Likewise for cover photos: cover_photo is relocated to cover_photo_sizes.
    if user_record.cover_photo:
        logger.info(f"users.py | Processing user cover photo {user_record.cover_photo}")
        user_record.cover_photo_sizes = user_record.cover_photo
        user_record.cover_photo = None
    return user_record
예제 #2
0
def parse_user_event(self, user_contract, update_task, session, tx_receipt,
                     block_number, entry, event_type, user_record,
                     block_timestamp):
    """Apply a single user-contract event to ``user_record`` and return it.

    Dispatches on ``event_type`` to set the matching column(s), refreshes
    ``updated_at`` from ``block_timestamp``, applies IPFS metadata overrides
    for creators, refreshes the creator-node connection, probes IPFS to
    decide whether photo CIDs are directories (directories are stored under
    the ``*_sizes`` columns), and finally recomputes ``is_ready``.

    Returns the mutated ``user_record``.
    """
    event_args = entry["args"]

    # type specific field changes
    if event_type == user_event_types_lookup["add_user"]:
        handle_str = helpers.bytes32_to_str(event_args._handle)
        user_record.handle = handle_str
        user_record.handle_lc = handle_str.lower()
        user_record.wallet = event_args._wallet.lower()
    elif event_type == user_event_types_lookup["update_multihash"]:
        metadata_multihash = event_args._multihashDigest
        user_record.metadata_multihash = helpers.multihash_digest_to_cid(
            metadata_multihash)
    elif event_type == user_event_types_lookup["update_name"]:
        user_record.name = helpers.bytes32_to_str(event_args._name)
    elif event_type == user_event_types_lookup["update_location"]:
        user_record.location = helpers.bytes32_to_str(event_args._location)
    elif event_type == user_event_types_lookup["update_bio"]:
        user_record.bio = event_args._bio
    elif event_type == user_event_types_lookup["update_profile_photo"]:
        user_record.profile_picture = helpers.multihash_digest_to_cid(
            event_args._profilePhotoDigest)
    elif event_type == user_event_types_lookup["update_cover_photo"]:
        user_record.cover_photo = helpers.multihash_digest_to_cid(
            event_args._coverPhotoDigest)
    elif event_type == user_event_types_lookup["update_is_creator"]:
        user_record.is_creator = event_args._isCreator
    elif event_type == user_event_types_lookup["update_is_verified"]:
        user_record.is_verified = event_args._isVerified
    elif event_type == user_event_types_lookup["update_creator_node_endpoint"]:
        user_record.creator_node_endpoint = event_args._creatorNodeEndpoint

    # New updated_at timestamp
    user_record.updated_at = datetime.utcfromtimestamp(block_timestamp)

    # If creator, look up metadata multihash in IPFS and override with metadata fields
    metadata_overrides = get_metadata_overrides_from_ipfs(
        session, update_task, user_record)

    if metadata_overrides:
        # metadata_overrides properties are defined in get_metadata_overrides_from_ipfs
        # Note: both *_sizes overrides intentionally land on the plain
        # columns; the directory probes below relocate them to *_sizes.
        if metadata_overrides["profile_picture"]:
            user_record.profile_picture = metadata_overrides["profile_picture"]
        if metadata_overrides["profile_picture_sizes"]:
            user_record.profile_picture = metadata_overrides[
                "profile_picture_sizes"]
        if metadata_overrides["cover_photo"]:
            user_record.cover_photo = metadata_overrides["cover_photo"]
        if metadata_overrides["cover_photo_sizes"]:
            user_record.cover_photo = metadata_overrides["cover_photo_sizes"]
        if metadata_overrides["bio"]:
            user_record.bio = metadata_overrides["bio"]
        if metadata_overrides["name"]:
            user_record.name = metadata_overrides["name"]
        if metadata_overrides["location"]:
            user_record.location = metadata_overrides["location"]

    # Refresh connection for non-creators
    refresh_user_connection(user_record, update_task)

    # if profile_picture CID is of a dir, store under _sizes field instead
    if user_record.profile_picture:
        ipfs = update_task.ipfs_client._api
        logger.warning(
            f"users.py | Processing user profile_picture {user_record.profile_picture}"
        )
        try:
            # attempt to cat single byte from CID to determine if dir or file
            ipfs.cat(user_record.profile_picture, 0, 1)
        except Exception as e:  # pylint: disable=W0703
            if "this dag node is a directory" in str(e):
                user_record.profile_picture_sizes = user_record.profile_picture
                user_record.profile_picture = None
                logger.warning(
                    f'users.py | Successfully processed CID - {user_record.profile_picture_sizes}'
                )
            else:
                # Bare re-raise preserves the original exception type and
                # traceback; wrapping it in Exception(e) would discard both.
                raise

    # if cover_photo CID is of a dir, store under _sizes field instead
    if user_record.cover_photo:
        ipfs = update_task.ipfs_client._api
        logger.warning(
            f"users.py | Processing user cover_photo {user_record.cover_photo}"
        )
        try:
            # attempt to cat single byte from CID to determine if dir or file
            ipfs.cat(user_record.cover_photo, 0, 1)
        except Exception as e:  # pylint: disable=W0703
            if "this dag node is a directory" in str(e):
                user_record.cover_photo_sizes = user_record.cover_photo
                user_record.cover_photo = None
                logger.warning(
                    f'users.py | Successfully processed CID - {user_record.cover_photo_sizes}'
                )
            else:
                # Bare re-raise preserves the original exception type and
                # traceback; wrapping it in Exception(e) would discard both.
                raise

    # Find out if a user is ready to query in the db. If they are, set the is_ready field
    user_record.is_ready = is_user_ready(user_record)

    return user_record
예제 #3
0
def test_index_users(bus_mock: mock.MagicMock, app):
    """Tests that users are indexed correctly.

    Drives parse_user_event through each supported user event type against a
    scoped DB session, with the challenge event bus mocked out, and asserts
    the expected columns change after each event.
    """
    with app.app_context():
        db = get_db()
        redis = get_redis()
        web3 = Web3()
        bus_mock(redis)
        update_task = DatabaseTask(
            cid_metadata_client=cid_metadata_client,
            web3=web3,
            challenge_event_bus=bus_mock,
            redis=redis,
        )

    with db.scoped_session() as session, bus_mock.use_scoped_dispatch_queue():
        # ================== Test Add User Event ==================
        event_type, entry = get_add_user_event()

        block_number = 1
        block_timestamp = 1585336422

        # Some sqlalchemy user instance
        user_record = lookup_user_record(
            update_task,
            session,
            entry,
            block_number,
            block_timestamp,
            "0x"  # txhash
        )

        assert user_record.updated_at == None

        # Fields set to None by default
        assert user_record.handle == None
        assert user_record.handle_lc == None
        assert user_record.wallet == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # updated_at should be updated every parse_user_event
        assert user_record.updated_at == datetime.utcfromtimestamp(
            block_timestamp)

        # add_user should be updated fields: handle, handle_lc, wallet
        assert user_record.handle == helpers.bytes32_to_str(entry.args._handle)
        assert (user_record.handle_lc == helpers.bytes32_to_str(
            entry.args._handle).lower())
        assert user_record.wallet == entry.args._wallet.lower()

        # ================== Test Update User Bio Event ==================
        event_type, entry = get_update_bio_event()

        assert user_record.bio == None
        assert user_record.handle != None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_bio should set the bio field
        assert user_record.bio == entry.args._bio

        # ================== Test Update User Location Event ==================
        event_type, entry = get_update_location_event()

        # `location` field is none by default
        assert user_record.location == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_location should set the location field
        assert user_record.location == helpers.bytes32_to_str(
            entry.args._location)

        # ================== Test Update User is Creator Event ==================
        event_type, entry = get_update_is_creator_event()

        # `is_creator` field is none by default
        assert user_record.is_creator == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_is_creator should set the is_creator field
        assert user_record.is_creator == entry.args._isCreator

        # ================== Test Update User Name Event ==================
        event_type, entry = get_update_name_event()

        # `name` field is none by default
        assert user_record.name == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_name should set the name field
        assert user_record.name == helpers.bytes32_to_str(entry.args._name)

        # ================== Test Update User CNodes Event for legacy ==================
        event_type, entry = get_update_creator_node_endpoint_event()

        # `creator_node_endpoint` field is none by default
        assert user_record.creator_node_endpoint == None

        # Set primary id so that creator node endpoints is not set
        assert user_record.primary_id == None
        user_record.primary_id = 1

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # creator_node_endpoint should stay unset while primary_id is set
        assert user_record.creator_node_endpoint == None

        # Set primary id back to none
        user_record.primary_id = None

        # ================== Test Update User CNodes Event ==================
        event_type, entry = get_update_creator_node_endpoint_event()

        # `creator_node_endpoint` field is none by default
        assert user_record.creator_node_endpoint == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_creator_node_endpoint should set the creator_node_endpoint field
        assert user_record.creator_node_endpoint == entry.args._creatorNodeEndpoint

        # ================== Test Update User Profile Photo Event ==================
        event_type, entry = get_update_profile_photo_event()

        # `profile_picture` field is none by default
        assert user_record.profile_picture == None
        assert user_record.profile_picture_sizes == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_profile_photo should populate profile_picture_sizes and clear profile_picture
        assert user_record.profile_picture_sizes == helpers.multihash_digest_to_cid(
            entry.args._profilePhotoDigest)
        assert user_record.profile_picture == None

        # ================== Test Update User Cover Photo Event ==================
        event_type, entry = get_update_cover_photo_event()

        # `cover_photo` field is none by default
        assert user_record.cover_photo == None
        assert user_record.cover_photo_sizes == None

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            None,  # ipfs_metadata - not used
            block_timestamp,  # Used to update the user.updated_at field
        )

        # update_cover_photo should populate cover_photo_sizes and clear cover_photo
        assert user_record.cover_photo == None
        assert user_record.cover_photo_sizes == helpers.multihash_digest_to_cid(
            entry.args._coverPhotoDigest)

        # ================== Test Update User Metadata Event ==================
        event_type, entry = get_update_multihash_event()

        parse_user_event(
            None,  # self - not used
            update_task,  # only need the ipfs client for get_metadata
            session,
            None,  # tx_receipt - not used
            block_number,  # not used
            entry,  # Contains the event args used for updating
            event_type,  # String that should be one of user_event_types_lookup
            user_record,  # User ORM instance
            update_task.cid_metadata_client.get_metadata(
                helpers.multihash_digest_to_cid(entry.args._multihashDigest),
                user_metadata_format,
                "",
            ),  # ipfs_metadata
            block_timestamp,  # Used to update the user.updated_at field
        )
        session.flush()

        # Re-fetch the same metadata blob to compare against the record
        entry_multihash = helpers.multihash_digest_to_cid(
            entry.args._multihashDigest)
        ipfs_metadata = update_task.cid_metadata_client.get_metadata(
            entry_multihash, "", "")

        assert user_record.profile_picture == ipfs_metadata["profile_picture"]
        assert user_record.cover_photo == ipfs_metadata["cover_photo"]
        assert user_record.bio == ipfs_metadata["bio"]
        assert user_record.name == ipfs_metadata["name"]
        assert user_record.location == ipfs_metadata["location"]
        assert (user_record.profile_picture_sizes ==
                ipfs_metadata["profile_picture_sizes"])
        assert user_record.cover_photo_sizes == ipfs_metadata[
            "cover_photo_sizes"]
        assert user_record.has_collectibles == True
        assert user_record.playlist_library == ipfs_metadata[
            "playlist_library"]

        assert user_record.is_deactivated == True

        # Associated ETH wallets in the DB should exactly match the metadata
        ipfs_associated_wallets = ipfs_metadata["associated_wallets"]
        associated_wallets = (session.query(AssociatedWallet).filter_by(
            user_id=user_record.user_id,
            is_current=True,
            is_delete=False,
            chain="eth",
        ).all())
        for associated_wallet in associated_wallets:
            assert associated_wallet.wallet in ipfs_associated_wallets
        assert len(associated_wallets) == len(ipfs_associated_wallets)

        # Associated SOL wallets in the DB should exactly match the metadata
        ipfs_associated_sol_wallets = ipfs_metadata["associated_sol_wallets"]
        associated_sol_wallets = (session.query(AssociatedWallet).filter_by(
            user_id=user_record.user_id,
            is_current=True,
            is_delete=False,
            chain="sol",
        ).all())
        for associated_wallet in associated_sol_wallets:
            assert associated_wallet.wallet in ipfs_associated_sol_wallets
        assert len(associated_sol_wallets) == len(ipfs_associated_sol_wallets)

        # User events (referrer / mobile) should be indexed and dispatch
        # the expected challenge events on the mocked bus
        user_events = (session.query(UserEvents).filter_by(
            user_id=user_record.user_id, is_current=True).first())
        assert user_events.referrer == 2
        assert user_events.is_mobile_user == True
        calls = [
            mock.call.dispatch(ChallengeEvent.mobile_install, 1, 1),
            mock.call.dispatch(ChallengeEvent.referred_signup, 1, 1),
            mock.call.dispatch(ChallengeEvent.referral_signup, 1, 2,
                               {"referred_user_id": 1}),
        ]
        bus_mock.assert_has_calls(calls, any_order=True)
예제 #4
0
def parse_user_event(
        self, user_contract, update_task, session, tx_receipt, block_number, entry, event_type, user_record,
        block_timestamp):
    """Apply a single user-contract event to ``user_record`` and return it.

    Dispatches on ``event_type`` to set the matching column(s), refreshes
    ``updated_at`` from ``block_timestamp``, applies IPFS metadata overrides
    for creators, refreshes the creator-node connection, and relocates photo
    CIDs that resolve to IPFS directories into the ``*_sizes`` columns.
    """
    event_args = entry["args"]

    # type specific field changes
    if event_type == user_event_types_lookup["add_user"]:
        handle_str = helpers.bytes32_to_str(event_args._handle)
        user_record.handle = handle_str
        user_record.handle_lc = handle_str.lower()
        user_record.wallet = event_args._wallet.lower()
    elif event_type == user_event_types_lookup["update_multihash"]:
        metadata_multihash = event_args._multihashDigest
        user_record.metadata_multihash = helpers.multihash_digest_to_cid(metadata_multihash)
    elif event_type == user_event_types_lookup["update_name"]:
        user_record.name = helpers.bytes32_to_str(event_args._name)
    elif event_type == user_event_types_lookup["update_location"]:
        user_record.location = helpers.bytes32_to_str(event_args._location)
    elif event_type == user_event_types_lookup["update_bio"]:
        user_record.bio = event_args._bio
    elif event_type == user_event_types_lookup["update_profile_photo"]:
        user_record.profile_picture = helpers.multihash_digest_to_cid(event_args._profilePhotoDigest)
    elif event_type == user_event_types_lookup["update_cover_photo"]:
        user_record.cover_photo = helpers.multihash_digest_to_cid(event_args._coverPhotoDigest)
    elif event_type == user_event_types_lookup["update_is_creator"]:
        user_record.is_creator = event_args._isCreator
    elif event_type == user_event_types_lookup["update_is_verified"]:
        user_record.is_verified = event_args._isVerified
    elif event_type == user_event_types_lookup["update_creator_node_endpoint"]:
        user_record.creator_node_endpoint = event_args._creatorNodeEndpoint

    # New updated_at timestamp
    user_record.updated_at = datetime.utcfromtimestamp(block_timestamp)

    # If creator, look up metadata multihash in IPFS and override with metadata fields
    metadata_overrides = get_metadata_overrides_from_ipfs(
        session, update_task, user_record
    )

    if metadata_overrides:
        # metadata_overrides properties are defined in get_metadata_overrides_from_ipfs
        # Note: both *_sizes overrides intentionally land on the plain
        # columns; the directory checks below relocate them to *_sizes.
        if metadata_overrides["profile_picture"]:
            user_record.profile_picture = metadata_overrides["profile_picture"]
        if metadata_overrides["profile_picture_sizes"]:
            user_record.profile_picture = metadata_overrides["profile_picture_sizes"]
        if metadata_overrides["cover_photo"]:
            user_record.cover_photo = metadata_overrides["cover_photo"]
        if metadata_overrides["cover_photo_sizes"]:
            user_record.cover_photo = metadata_overrides["cover_photo_sizes"]
        if metadata_overrides["bio"]:
            user_record.bio = metadata_overrides["bio"]
        if metadata_overrides["name"]:
            user_record.name = metadata_overrides["name"]
        if metadata_overrides["location"]:
            user_record.location = metadata_overrides["location"]

    # Refresh connection for non-creators
    refresh_user_connection(user_record, update_task)

    # if profile_picture CID is of a dir, store under _sizes field instead
    if user_record.profile_picture:
        logger.warning(f"users.py | Processing user profile_picture {user_record.profile_picture}")
        try:
            is_directory = update_task.ipfs_client.multihash_is_directory(user_record.profile_picture)
            if is_directory:
                user_record.profile_picture_sizes = user_record.profile_picture
                user_record.profile_picture = None
        except Exception as e:
            # we are unable to get the profile picture
            # NOTE(review): "invalid multihash" is matched textually in the
            # error message — confirm this matches what multihash_is_directory raises
            if 'invalid multihash' in str(e):
                user_record.profile_picture_sizes = None
                user_record.profile_picture = None
            else:
                raise e

    # if cover_photo CID is of a dir, store under _sizes field instead
    if user_record.cover_photo:
        logger.warning(f"users.py | Processing user cover photo {user_record.cover_photo}")
        try:
            is_directory = update_task.ipfs_client.multihash_is_directory(user_record.cover_photo)
            if is_directory:
                user_record.cover_photo_sizes = user_record.cover_photo
                user_record.cover_photo = None
        except Exception as e:
            # we are unable to get the cover photo
            if 'invalid multihash' in str(e):
                user_record.cover_photo_sizes = None
                user_record.cover_photo = None
            else:
                raise e

    return user_record
예제 #5
0
def parse_playlist_event(self, update_task, entry, event_type, playlist_record,
                         block_timestamp, session):
    """Apply a single playlist-contract event to ``playlist_record``.

    Dispatches on ``event_type`` to mutate the record (create/delete the
    playlist, add/remove/reorder tracks, update name/privacy/cover photo/
    description/UPC), stamps ``updated_at`` from ``block_timestamp``, and
    returns the record. Returns None when a blacklisted cover-photo CID is
    encountered.
    """
    event_args = entry["args"]
    # Just use block_timestamp as integer
    block_datetime = datetime.utcfromtimestamp(block_timestamp)
    block_integer_time = int(block_timestamp)

    if event_type == playlist_event_types_lookup["playlist_created"]:
        logger.info(
            f"[playlist_created] | Creating playlist {playlist_record.playlist_id}"
        )
        playlist_record.playlist_owner_id = event_args._playlistOwnerId
        playlist_record.is_private = event_args._isPrivate
        playlist_record.is_album = event_args._isAlbum

        # Each entry records the track id plus the block time it was added
        playlist_content_array = []
        for track_id in event_args._trackIds:
            playlist_content_array.append({
                "track": track_id,
                "time": block_integer_time
            })

        playlist_record.playlist_contents = {
            "track_ids": playlist_content_array
        }
        playlist_record.created_at = block_datetime

    if event_type == playlist_event_types_lookup["playlist_deleted"]:
        logger.info(
            f"[playlist_deleted] | Deleting playlist {playlist_record.playlist_id}"
        )
        playlist_record.is_delete = True

    if event_type == playlist_event_types_lookup["playlist_track_added"]:
        if getattr(playlist_record, 'playlist_contents') is not None:
            logger.info(
                f"[playlist_track_added] | Adding track {event_args._addedTrackId} to playlist \
            {playlist_record.playlist_id}")
            old_playlist_content_array = playlist_record.playlist_contents[
                "track_ids"]
            new_playlist_content_array = old_playlist_content_array
            # Append new track object
            new_playlist_content_array.append({
                "track": event_args._addedTrackId,
                "time": block_integer_time
            })
            playlist_record.playlist_contents = {
                "track_ids": new_playlist_content_array
            }
            playlist_record.timestamp = block_datetime

    if event_type == playlist_event_types_lookup["playlist_track_deleted"]:
        if getattr(playlist_record, 'playlist_contents') is not None:
            logger.info(
                f"[playlist_track_deleted] | Removing track {event_args._deletedTrackId} from \
            playlist {playlist_record.playlist_id}")
            old_playlist_content_array = playlist_record.playlist_contents[
                "track_ids"]
            new_playlist_content_array = []
            deleted_track_id = event_args._deletedTrackId
            deleted_track_timestamp = int(event_args._deletedTrackTimestamp)
            # Remove only the FIRST entry matching both track id and the
            # recorded add time; duplicates of the same track are kept.
            delete_track_entry_found = False
            for track_entry in old_playlist_content_array:
                if track_entry["track"] == deleted_track_id \
                        and track_entry["time"] == deleted_track_timestamp \
                        and not delete_track_entry_found:
                    delete_track_entry_found = True
                    continue
                new_playlist_content_array.append(track_entry)

            playlist_record.playlist_contents = {
                "track_ids": new_playlist_content_array
            }

    if event_type == playlist_event_types_lookup["playlist_tracks_ordered"]:
        if getattr(playlist_record, 'playlist_contents') is not None:
            logger.info(
                f"[playlist_tracks_ordered] | Ordering playlist {playlist_record.playlist_id}"
            )
            old_playlist_content_array = playlist_record.playlist_contents[
                "track_ids"]

            # Map each track id to the (possibly multiple) add-times it
            # already has, so reordering preserves original timestamps.
            intermediate_track_time_lookup_dict = {}

            for old_playlist_entry in old_playlist_content_array:
                track_id = old_playlist_entry["track"]
                track_time = old_playlist_entry["time"]

                if track_id not in intermediate_track_time_lookup_dict:
                    intermediate_track_time_lookup_dict[track_id] = []

                intermediate_track_time_lookup_dict[track_id].append(
                    track_time)

            # Rebuild contents in the newly ordered sequence; duplicate
            # track ids consume their stored times in FIFO order, and any
            # track with no stored time falls back to the block time.
            playlist_content_array = []
            for track_id in event_args._orderedTrackIds:
                track_time_array_length = len(
                    intermediate_track_time_lookup_dict[track_id])
                if track_time_array_length > 1:
                    track_time = intermediate_track_time_lookup_dict[
                        track_id].pop(0)
                elif track_time_array_length == 1:
                    track_time = intermediate_track_time_lookup_dict[track_id][
                        0]
                else:
                    track_time = block_integer_time
                playlist_content_array.append({
                    "track": track_id,
                    "time": track_time
                })

            playlist_record.playlist_contents = {
                "track_ids": playlist_content_array
            }

    if event_type == playlist_event_types_lookup["playlist_name_updated"]:
        logger.info(
            f"[playlist_name_updated] | Updating playlist {playlist_record.playlist_id} name \
        to {event_args._updatedPlaylistName}")
        playlist_record.playlist_name = event_args._updatedPlaylistName

    if event_type == playlist_event_types_lookup["playlist_privacy_updated"]:
        logger.info(
            f"[playlist_privacy_updated] | Updating playlist {playlist_record.playlist_id} \
        privacy to {event_args._updatedIsPrivate}")
        playlist_record.is_private = event_args._updatedIsPrivate

    if event_type == playlist_event_types_lookup[
            "playlist_cover_photo_updated"]:
        # NOTE(review): the record's playlist_image_multihash is assigned
        # BEFORE the blacklist check below, so on a blacklisted CID we
        # return None with that field already mutated — confirm the caller
        # discards the record when None is returned.
        playlist_record.playlist_image_multihash = helpers.multihash_digest_to_cid(
            event_args._playlistImageMultihashDigest)

        # If cid is in blacklist, do not index playlist
        is_blacklisted = is_blacklisted_ipld(
            session, playlist_record.playlist_image_multihash)
        if is_blacklisted:
            logger.info(
                "[playlist_cover_photo_updated] | Encountered blacklisted CID %s in indexing \
                playlist image multihash",
                playlist_record.playlist_image_multihash)
            return None

        # All incoming playlist images are set to ipfs dir in column playlist_image_sizes_multihash
        if playlist_record.playlist_image_multihash:
            logger.info(
                f"[playlist_cover_photo_updated] | Processing playlist image \
            {playlist_record.playlist_image_multihash}")
            playlist_record.playlist_image_sizes_multihash = playlist_record.playlist_image_multihash
            playlist_record.playlist_image_multihash = None

    if event_type == playlist_event_types_lookup[
            "playlist_description_updated"]:
        logger.info(
            f"[playlist_description_updated] | Updating playlist {playlist_record.playlist_id} \
        description to {event_args._playlistDescription}")
        playlist_record.description = event_args._playlistDescription

    if event_type == playlist_event_types_lookup["playlist_upc_updated"]:
        logger.info(
            f"[playlist_upc_updated] | Updating playlist {playlist_record.playlist_id} UPC \
        to {event_args._playlistUPC}")
        playlist_record.upc = helpers.bytes32_to_str(event_args._playlistUPC)

    # Every handled event refreshes updated_at
    playlist_record.updated_at = block_datetime
    return playlist_record
예제 #6
0
def parse_playlist_event(
        self, update_task, entry, event_type, playlist_record, block_timestamp
):
    """Apply a single on-chain playlist event to ``playlist_record``.

    Args:
        update_task: indexing task context; provides the IPFS client used to
            distinguish an image file CID from a directory CID.
        entry: decoded transaction log; event arguments are read from
            ``entry["args"]``.
        event_type: key into ``playlist_event_types_lookup`` naming the event.
        playlist_record: ORM row mutated in place.
        block_timestamp: unix timestamp of the containing block.

    Returns:
        The mutated ``playlist_record``.
    """
    event_args = entry["args"]
    # Keep both representations: datetime for the *_at columns, plain int for
    # the "time" field stored inside playlist_contents entries.
    block_datetime = datetime.utcfromtimestamp(block_timestamp)
    block_integer_time = int(block_timestamp)

    if event_type == playlist_event_types_lookup["playlist_created"]:
        playlist_record.playlist_owner_id = event_args._playlistOwnerId
        playlist_record.is_private = event_args._isPrivate
        playlist_record.is_album = event_args._isAlbum

        playlist_record.playlist_contents = {
            "track_ids": [
                {"track": track_id, "time": block_integer_time}
                for track_id in event_args._trackIds
            ]
        }
        playlist_record.created_at = block_datetime

    if event_type == playlist_event_types_lookup["playlist_deleted"]:
        playlist_record.is_delete = True

    if event_type == playlist_event_types_lookup["playlist_track_added"]:
        if playlist_record.playlist_contents is not None:
            logger.info("playlist event playlist_track_added")
            # Copy instead of aliasing so the previously stored JSON list is
            # not mutated in place (in-place mutation is invisible to the
            # ORM's change tracking).
            new_playlist_content_array = list(
                playlist_record.playlist_contents["track_ids"])
            # Append new track object
            new_playlist_content_array.append(
                {"track": event_args._addedTrackId, "time": block_integer_time}
            )
            playlist_record.playlist_contents = {
                "track_ids": new_playlist_content_array}
            playlist_record.timestamp = block_datetime

    if event_type == playlist_event_types_lookup["playlist_track_deleted"]:
        if playlist_record.playlist_contents is not None:
            logger.info("playlist event playlist_track_deleted")
            old_playlist_content_array = playlist_record.playlist_contents["track_ids"]
            new_playlist_content_array = []
            deleted_track_id = event_args._deletedTrackId
            deleted_track_timestamp = int(event_args._deletedTrackTimestamp)
            # Remove only the FIRST entry matching both track id and time;
            # a track may legitimately appear multiple times in a playlist.
            delete_track_entry_found = False
            for track_entry in old_playlist_content_array:
                if (not delete_track_entry_found
                        and track_entry["track"] == deleted_track_id
                        and track_entry["time"] == deleted_track_timestamp):
                    delete_track_entry_found = True
                    continue
                new_playlist_content_array.append(track_entry)

            playlist_record.playlist_contents = {
                "track_ids": new_playlist_content_array}

    if event_type == playlist_event_types_lookup["playlist_tracks_ordered"]:
        if playlist_record.playlist_contents is not None:
            logger.info("playlist event playlist_tracks_ordered")
            old_playlist_content_array = playlist_record.playlist_contents["track_ids"]

            # Map each track id to the ordered list of times it currently
            # appears with, so duplicate tracks keep their distinct times.
            intermediate_track_time_lookup_dict = {}
            for old_playlist_entry in old_playlist_content_array:
                intermediate_track_time_lookup_dict.setdefault(
                    old_playlist_entry["track"], []
                ).append(old_playlist_entry["time"])

            playlist_content_array = []
            for track_id in event_args._orderedTrackIds:
                # .get guards against an ordered id absent from the current
                # contents (previously raised KeyError before the fallback
                # branch could run).
                track_times = intermediate_track_time_lookup_dict.get(
                    track_id, [])
                if len(track_times) > 1:
                    # Consume times front-to-back for duplicated tracks.
                    track_time = track_times.pop(0)
                elif len(track_times) == 1:
                    track_time = track_times[0]
                else:
                    # Unknown track id: fall back to the block time.
                    track_time = block_integer_time
                playlist_content_array.append(
                    {"track": track_id, "time": track_time})

            playlist_record.playlist_contents = {
                "track_ids": playlist_content_array}

    if event_type == playlist_event_types_lookup["playlist_name_updated"]:
        playlist_record.playlist_name = event_args._updatedPlaylistName

    if event_type == playlist_event_types_lookup["playlist_privacy_updated"]:
        playlist_record.is_private = event_args._updatedIsPrivate

    if event_type == playlist_event_types_lookup["playlist_cover_photo_updated"]:
        playlist_record.playlist_image_multihash = helpers.multihash_digest_to_cid(
            event_args._playlistImageMultihashDigest
        )

        # if playlist_image_multihash CID is of a dir, store under _sizes field instead
        if playlist_record.playlist_image_multihash:
            ipfs = update_task.ipfs_client._api
            logger.warning(
                f"catting playlist_image_multihash {playlist_record.playlist_image_multihash}")
            try:
                # attempt to cat single byte from CID to determine if dir or file
                ipfs.cat(playlist_record.playlist_image_multihash, 0, 1)
            except Exception as e:  # pylint: disable=W0703
                if "this dag node is a directory" in str(e):
                    playlist_record.playlist_image_sizes_multihash = (
                        playlist_record.playlist_image_multihash)
                    playlist_record.playlist_image_multihash = None
                    logger.info('Successfully processed CID')
                else:
                    # Re-raise unchanged: wrapping in Exception(e) would lose
                    # the original exception type and traceback.
                    raise

    if event_type == playlist_event_types_lookup["playlist_description_updated"]:
        playlist_record.description = event_args._playlistDescription

    if event_type == playlist_event_types_lookup["playlist_upc_updated"]:
        playlist_record.upc = helpers.bytes32_to_str(event_args._playlistUPC)

    playlist_record.updated_at = block_datetime
    return playlist_record
예제 #7
0
def parse_user_event(self, user_contract, update_task, session, tx_receipt,
                     block_number, entry, event_type, user_record,
                     block_timestamp):
    """Apply a single on-chain user event to ``user_record``.

    Args:
        user_contract: user contract handle (unused here; kept for interface
            parity with the other event parsers).
        update_task: indexing task context; used to fetch IPFS metadata.
        session: DB session for IPLD blacklist lookups and wallet updates.
        tx_receipt: transaction receipt (unused here).
        block_number: block number of the event (unused here).
        entry: decoded transaction log; arguments are read from entry["args"].
        event_type: key into ``user_event_types_lookup`` naming the event.
        user_record: ORM row mutated in place.
        block_timestamp: unix timestamp of the containing block.

    Returns:
        The mutated ``user_record``, or ``None`` when the event references a
        blacklisted CID and must not be indexed.
    """
    event_args = entry["args"]

    # type specific field changes
    if event_type == user_event_types_lookup["add_user"]:
        handle_str = helpers.bytes32_to_str(event_args._handle)
        user_record.handle = handle_str
        user_record.handle_lc = handle_str.lower()
        user_record.wallet = event_args._wallet.lower()
    elif event_type == user_event_types_lookup["update_multihash"]:
        metadata_multihash = helpers.multihash_digest_to_cid(
            event_args._multihashDigest)
        is_blacklisted = is_blacklisted_ipld(session, metadata_multihash)
        # If cid is in blacklist, do not update user
        if is_blacklisted:
            logger.info(
                f"index.py | users.py | Encountered blacklisted CID:"
                f"{metadata_multihash} in indexing update user metadata multihash"
            )
            return None
        user_record.metadata_multihash = metadata_multihash
    elif event_type == user_event_types_lookup["update_name"]:
        user_record.name = helpers.bytes32_to_str(event_args._name)
    elif event_type == user_event_types_lookup["update_location"]:
        user_record.location = helpers.bytes32_to_str(event_args._location)
    elif event_type == user_event_types_lookup["update_bio"]:
        user_record.bio = event_args._bio
    elif event_type == user_event_types_lookup["update_profile_photo"]:
        profile_photo_multihash = helpers.multihash_digest_to_cid(
            event_args._profilePhotoDigest)
        is_blacklisted = is_blacklisted_ipld(session, profile_photo_multihash)
        if is_blacklisted:
            logger.info(
                f"index.py | users.py | Encountered blacklisted CID:"
                f"{profile_photo_multihash} in indexing update user profile photo"
            )
            return None
        user_record.profile_picture = profile_photo_multihash
    elif event_type == user_event_types_lookup["update_cover_photo"]:
        cover_photo_multihash = helpers.multihash_digest_to_cid(
            event_args._coverPhotoDigest)
        is_blacklisted = is_blacklisted_ipld(session, cover_photo_multihash)
        if is_blacklisted:
            logger.info(
                f"index.py | users.py | Encountered blacklisted CID:"
                f"{cover_photo_multihash} in indexing update user cover photo")
            return None
        user_record.cover_photo = cover_photo_multihash
    elif event_type == user_event_types_lookup["update_is_creator"]:
        user_record.is_creator = event_args._isCreator
    elif event_type == user_event_types_lookup["update_is_verified"]:
        user_record.is_verified = event_args._isVerified
    elif event_type == user_event_types_lookup["update_creator_node_endpoint"]:
        # Ensure any user consuming the new UserReplicaSetManager contract does not process
        # legacy `creator_node_endpoint` changes
        # Reference user_replica_set.py for the updated indexing flow around this field
        replica_set_upgraded = user_replica_set_upgraded(user_record)
        logger.info(
            f"index.py | users.py | {user_record.handle} Replica set upgraded: {replica_set_upgraded}"
        )
        if not replica_set_upgraded:
            user_record.creator_node_endpoint = event_args._creatorNodeEndpoint

    # New updated_at timestamp
    user_record.updated_at = datetime.utcfromtimestamp(block_timestamp)

    # If the multihash is updated, fetch the metadata (if not fetched) and update the associated wallets column
    if event_type == user_event_types_lookup["update_multihash"]:
        # Look up metadata multihash in IPFS and override with metadata fields
        ipfs_metadata = get_ipfs_metadata(update_task, user_record)

        if ipfs_metadata:
            # ipfs_metadata properties are defined in get_ipfs_metadata

            # Fields also stored on chain. dict.get keeps the original
            # "present and truthy" semantics in a single lookup.
            if ipfs_metadata.get("profile_picture"):
                user_record.profile_picture = ipfs_metadata["profile_picture"]

            if ipfs_metadata.get("cover_photo"):
                user_record.cover_photo = ipfs_metadata["cover_photo"]

            if ipfs_metadata.get("bio"):
                user_record.bio = ipfs_metadata["bio"]

            if ipfs_metadata.get("name"):
                user_record.name = ipfs_metadata["name"]

            if ipfs_metadata.get("location"):
                user_record.location = ipfs_metadata["location"]

            # Fields with no on-chain counterpart
            if ipfs_metadata.get("profile_picture_sizes"):
                user_record.profile_picture = ipfs_metadata[
                    "profile_picture_sizes"]

            if ipfs_metadata.get("cover_photo_sizes"):
                user_record.cover_photo = ipfs_metadata["cover_photo_sizes"]

            # has_collectibles is True only for a non-empty dict value;
            # anything else (missing, None, wrong type, empty) clears it.
            collectibles = ipfs_metadata.get("collectibles")
            user_record.has_collectibles = bool(
                isinstance(collectibles, dict) and collectibles)

            # Membership (not truthiness) check is intentional here: an
            # explicit empty/None value still triggers a wallet update.
            if 'associated_wallets' in ipfs_metadata:
                update_user_associated_wallets(
                    session, update_task, user_record,
                    ipfs_metadata['associated_wallets'])

    # All incoming profile photos intended to be a directory
    # Any write to profile_picture field is replaced by profile_picture_sizes
    if user_record.profile_picture:
        logger.info(
            f"index.py | users.py | Processing user profile_picture {user_record.profile_picture}"
        )
        user_record.profile_picture_sizes = user_record.profile_picture
        user_record.profile_picture = None

    # All incoming cover photos intended to be a directory
    # Any write to cover_photo field is replaced by cover_photo_sizes
    if user_record.cover_photo:
        logger.info(
            f"index.py | users.py | Processing user cover photo {user_record.cover_photo}"
        )
        user_record.cover_photo_sizes = user_record.cover_photo
        user_record.cover_photo = None
    return user_record
예제 #8
0
def parse_user_event(
    self,
    update_task: DatabaseTask,
    session: Session,
    tx_receipt,
    block_number,
    entry,
    event_type,
    user_record,
    ipfs_metadata,
    block_timestamp,
):
    """Apply a single on-chain user event to ``user_record``.

    Args:
        update_task: indexing task context; provides the challenge event bus.
        session: DB session for IPLD blacklist lookups and wallet updates.
        tx_receipt: transaction receipt (unused here).
        block_number: block number, forwarded to challenge-bus dispatches.
        entry: decoded transaction log; args read via ``helpers.get_tx_arg``.
        event_type: key into ``user_event_types_lookup`` naming the event.
        user_record: ORM row mutated in place.
        ipfs_metadata: pre-fetched user metadata dict (may be falsy).
        block_timestamp: unix timestamp of the containing block.

    Returns:
        The mutated ``user_record``, or ``None`` when the event references a
        blacklisted CID and must not be indexed.

    Raises:
        EntityMissingRequiredFieldError: if the resulting record is missing
            any required ``User`` field.
    """
    # type specific field changes
    if event_type == user_event_types_lookup["add_user"]:
        handle_str = helpers.bytes32_to_str(
            helpers.get_tx_arg(entry, "_handle"))
        user_record.handle = handle_str
        user_record.handle_lc = handle_str.lower()
        user_record.wallet = helpers.get_tx_arg(entry, "_wallet").lower()
    elif event_type == user_event_types_lookup["update_multihash"]:
        metadata_multihash = helpers.multihash_digest_to_cid(
            helpers.get_tx_arg(entry, "_multihashDigest"))
        user_record.metadata_multihash = metadata_multihash
    elif event_type == user_event_types_lookup["update_name"]:
        user_record.name = helpers.bytes32_to_str(
            helpers.get_tx_arg(entry, "_name"))
    elif event_type == user_event_types_lookup["update_location"]:
        user_record.location = helpers.bytes32_to_str(
            helpers.get_tx_arg(entry, "_location"))
    elif event_type == user_event_types_lookup["update_bio"]:
        user_record.bio = helpers.get_tx_arg(entry, "_bio")
    elif event_type == user_event_types_lookup["update_profile_photo"]:
        profile_photo_multihash = helpers.multihash_digest_to_cid(
            helpers.get_tx_arg(entry, "_profilePhotoDigest"))
        is_blacklisted = is_blacklisted_ipld(session, profile_photo_multihash)
        if is_blacklisted:
            logger.info(
                f"index.py | users.py | Encountered blacklisted CID:"
                f"{profile_photo_multihash} in indexing update user profile photo"
            )
            return None
        user_record.profile_picture = profile_photo_multihash
    elif event_type == user_event_types_lookup["update_cover_photo"]:
        cover_photo_multihash = helpers.multihash_digest_to_cid(
            helpers.get_tx_arg(entry, "_coverPhotoDigest"))
        is_blacklisted = is_blacklisted_ipld(session, cover_photo_multihash)
        if is_blacklisted:
            logger.info(
                f"index.py | users.py | Encountered blacklisted CID:"
                f"{cover_photo_multihash} in indexing update user cover photo")
            return None
        user_record.cover_photo = cover_photo_multihash
    elif event_type == user_event_types_lookup["update_is_creator"]:
        user_record.is_creator = helpers.get_tx_arg(entry, "_isCreator")
    elif event_type == user_event_types_lookup["update_is_verified"]:
        user_record.is_verified = helpers.get_tx_arg(entry, "_isVerified")
        # Verification unlocks the connect_verified challenge for this user.
        if user_record.is_verified:
            update_task.challenge_event_bus.dispatch(
                ChallengeEvent.connect_verified,
                block_number,
                user_record.user_id,
            )

    elif event_type == user_event_types_lookup["update_creator_node_endpoint"]:
        # Ensure any user consuming the new UserReplicaSetManager contract does not process
        # legacy `creator_node_endpoint` changes
        # Reference user_replica_set.py for the updated indexing flow around this field
        replica_set_upgraded = user_replica_set_upgraded(user_record)
        logger.info(
            f"index.py | users.py | {user_record.handle} Replica set upgraded: {replica_set_upgraded}"
        )
        if not replica_set_upgraded:
            user_record.creator_node_endpoint = helpers.get_tx_arg(
                entry, "_creatorNodeEndpoint")

    # New updated_at timestamp
    user_record.updated_at = datetime.utcfromtimestamp(block_timestamp)

    # If the multihash is updated, fetch the metadata (if not fetched) and update the associated wallets column
    if event_type == user_event_types_lookup["update_multihash"]:
        # Look up metadata multihash in IPFS and override with metadata fields
        if ipfs_metadata:
            # Fields also stored on chain. dict.get keeps the original
            # "present and truthy" semantics in a single lookup.
            if ipfs_metadata.get("profile_picture"):
                user_record.profile_picture = ipfs_metadata["profile_picture"]

            if ipfs_metadata.get("cover_photo"):
                user_record.cover_photo = ipfs_metadata["cover_photo"]

            if ipfs_metadata.get("bio"):
                user_record.bio = ipfs_metadata["bio"]

            if ipfs_metadata.get("name"):
                user_record.name = ipfs_metadata["name"]

            if ipfs_metadata.get("location"):
                user_record.location = ipfs_metadata["location"]

            # Fields with no on-chain counterpart
            if ipfs_metadata.get("profile_picture_sizes"):
                user_record.profile_picture = ipfs_metadata[
                    "profile_picture_sizes"]

            if ipfs_metadata.get("cover_photo_sizes"):
                user_record.cover_photo = ipfs_metadata["cover_photo_sizes"]

            # has_collectibles is True only for a non-empty dict value;
            # anything else (missing, None, wrong type, empty) clears it.
            collectibles = ipfs_metadata.get("collectibles")
            user_record.has_collectibles = bool(
                isinstance(collectibles, dict) and collectibles)

            # Membership (not truthiness) checks are intentional below: an
            # explicit empty/None value still triggers the update call.
            if "associated_wallets" in ipfs_metadata:
                update_user_associated_wallets(
                    session,
                    update_task,
                    user_record,
                    ipfs_metadata["associated_wallets"],
                    "eth",
                )

            if "associated_sol_wallets" in ipfs_metadata:
                update_user_associated_wallets(
                    session,
                    update_task,
                    user_record,
                    ipfs_metadata["associated_sol_wallets"],
                    "sol",
                )

            if ipfs_metadata.get("playlist_library"):
                user_record.playlist_library = ipfs_metadata[
                    "playlist_library"]

            if "is_deactivated" in ipfs_metadata:
                user_record.is_deactivated = ipfs_metadata["is_deactivated"]

            if ipfs_metadata.get("events"):
                update_user_events(
                    session,
                    user_record,
                    ipfs_metadata["events"],
                    update_task.challenge_event_bus,
                )

    # All incoming profile photos intended to be a directory
    # Any write to profile_picture field is replaced by profile_picture_sizes
    if user_record.profile_picture:
        logger.info(
            f"index.py | users.py | Processing user profile_picture {user_record.profile_picture}"
        )
        user_record.profile_picture_sizes = user_record.profile_picture
        user_record.profile_picture = None

    # All incoming cover photos intended to be a directory
    # Any write to cover_photo field is replaced by cover_photo_sizes
    if user_record.cover_photo:
        logger.info(
            f"index.py | users.py | Processing user cover photo {user_record.cover_photo}"
        )
        user_record.cover_photo_sizes = user_record.cover_photo
        user_record.cover_photo = None

    # Fail loudly rather than persisting a partially-populated user row.
    if not all_required_fields_present(User, user_record):
        raise EntityMissingRequiredFieldError(
            "user",
            user_record,
            f"Error parsing user {user_record} with entity missing required field(s)",
        )

    return user_record