def get_endpoint_from_id(update_task, sp_factory_inst, sp_id):
    # Get sp_id cache key
    cache_key = get_sp_id_key(sp_id)
    # Attempt to fetch from cache
    sp_info_cached = get_json_cached_key(update_task.redis, cache_key)
    if sp_info_cached:
        # Cached service info is a tuple; the endpoint is its second element
        endpoint = sp_info_cached[1]
        logger.info(
            f"index.py | user_replica_set.py | CACHE HIT FOR {cache_key}, found {sp_info_cached}"
        )
        return sp_factory_inst, endpoint

    # Cache miss - fetch endpoint info from the SPFactory contract,
    # constructing the contract instance if the caller did not pass one
    logger.info(f"index.py | user_replica_set.py | CACHE MISS FOR {cache_key}")
    if sp_factory_inst is None:
        sp_factory_inst = get_sp_factory_inst(update_task)

    cn_endpoint_info = sp_factory_inst.functions.getServiceEndpointInfo(
        content_node_service_type, sp_id
    ).call()
    logger.info(
        f"index.py | user_replica_set.py | spID={sp_id} fetched {cn_endpoint_info}"
    )
    endpoint = cn_endpoint_info[1]

    return sp_factory_inst, endpoint
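
A minimal usage sketch of the helper above, with hypothetical values: update_task is assumed to carry the indexer's redis client and web3 context, and passing None as the factory instance lets the helper construct it lazily on a cache miss.

# Hypothetical usage; the sp_id values are illustrative
sp_factory_inst, endpoint = get_endpoint_from_id(update_task, None, 1)
# Later calls can reuse the returned contract instance instead of
# re-instantiating it
sp_factory_inst, endpoint = get_endpoint_from_id(update_task, sp_factory_inst, 2)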
Example #2
def test_json_cache_single_key(redis_mock):
    """Test that values may be set and fetched from the redis cache"""
    set_json_cached_key(redis_mock, "key", {
        "name": "joe",
        "favorite_band": "Pink"
    })
    assert get_json_cached_key(redis_mock, "key") == {
        "name": "joe",
        "favorite_band": "Pink",
    }
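
The round trip above implies the cache helpers JSON-encode values on write and decode on read. A minimal sketch of such a pair, assuming values are stored as JSON strings (not necessarily the project's actual implementation):

import json
from typing import Optional


def set_json_cached_key_sketch(redis, key, obj, ttl: Optional[int] = None):
    # Serialize to JSON; default=str makes non-JSON types such as
    # datetime fall back to their string form (see Example #7)
    serialized = json.dumps(obj, default=str)
    if ttl is not None:
        redis.set(key, serialized, ex=ttl)
    else:
        redis.set(key, serialized)


def get_json_cached_key_sketch(redis, key):
    # Return the decoded object, or None on a miss or unparseable value
    cached = redis.get(key)
    if not cached:
        return None
    try:
        return json.loads(cached)
    except json.JSONDecodeError:
        return None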
Example #3
def fetch_cnode_info(sp_id, sp_factory_instance, redis):
    sp_id_key = get_sp_id_key(sp_id)
    sp_info_cached = get_json_cached_key(redis, sp_id_key)
    if sp_info_cached:
        logger.info(
            f"eth_contract_helpers.py | Found cached value for spID={sp_id} - {sp_info_cached}"
        )
        return sp_info_cached

    cn_endpoint_info = sp_factory_instance.functions.getServiceEndpointInfo(
        content_node_service_type, sp_id
    ).call()
    set_json_cached_key(redis, sp_id_key, cn_endpoint_info, cnode_info_redis_ttl)
    logger.info(
        f"eth_contract_helpers.py | Configured redis {sp_id_key} - {cn_endpoint_info} - TTL {cnode_info_redis_ttl}"
    )
    return cn_endpoint_info
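
fetch_cnode_info applies the same cache-aside pattern with a TTL, so repeated lookups within cnode_info_redis_ttl seconds are served from redis. A hypothetical call site:

# Hypothetical usage: the first call hits the eth contract and caches the
# result; a second call within the TTL is served from redis
cn_info = fetch_cnode_info(1, sp_factory_instance, redis)
cn_info_cached = fetch_cnode_info(1, sp_factory_instance, redis)
endpoint = cn_info[1]  # the endpoint is the second element of the info tuple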
Example #4
def set_indexing_error(
    redis_instance, blocknumber, blockhash, txhash, message, has_consensus=False
):
    indexing_error = get_json_cached_key(redis_instance, INDEXING_ERROR_KEY)

    # New error, or a different block/tx than the cached one: reset the record
    if indexing_error is None or (
        indexing_error["blocknumber"] != blocknumber
        or indexing_error["blockhash"] != blockhash
        or indexing_error["txhash"] != txhash
    ):
        indexing_error = {
            "count": 1,
            "blocknumber": blocknumber,
            "blockhash": blockhash,
            "txhash": txhash,
            "message": message,
            "has_consensus": has_consensus,
        }
        set_json_cached_key(redis_instance, INDEXING_ERROR_KEY, indexing_error)
    else:
        # Same error seen again: bump the count in place
        indexing_error["count"] += 1
        indexing_error["has_consensus"] = has_consensus
        set_json_cached_key(redis_instance, INDEXING_ERROR_KEY, indexing_error)
Example #5
def get_indexing_error(redis_instance):
    indexing_error = get_json_cached_key(redis_instance, INDEXING_ERROR_KEY)
    return indexing_error
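
A short sketch of how these two helpers behave together: repeated reports of the same block/tx increment the count, while a different block resets the record (all values hypothetical):

# Hypothetical values; redis_instance is assumed to be a redis client
set_indexing_error(redis_instance, 100, "0xabc", "0xdef", "tx failure")
set_indexing_error(redis_instance, 100, "0xabc", "0xdef", "tx failure")
assert get_indexing_error(redis_instance)["count"] == 2

# A different block/tx resets the record and its count
set_indexing_error(redis_instance, 101, "0x123", "0x456", "tx failure")
assert get_indexing_error(redis_instance)["count"] == 1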
Example #6
def get_scorable_track_data(session, redis_instance, strategy):
    """
    Returns a list of dicts, each of the form: {
        "track_id": number
        "created_at": string
        "owner_id": number
        "windowed_save_count": number
        "save_count": number
        "repost_count": number
        "windowed_repost_count": number
        "owner_follower_count": number
        "karma": number
        "listens": number
        "owner_verified": boolean
    }
    """

    # Unpack trending strategy parameters; each is applied as a filter below
    score_params = strategy.get_score_params()
    S = score_params["S"]  # upper bound on owner follower count
    r = score_params["r"]  # upper bound on owner following count
    q = score_params["q"]  # minimum listen count
    o = score_params["o"]  # track age window, in days
    f = score_params["f"]  # minimum follow age, in days
    qr = score_params["qr"]  # number of current trending tracks to exclude
    xf = score_params["xf"]  # passed through to get_karma
    pt = score_params["pt"]  # lower bound on owner follower count
    # Exclude the top `qr` tracks from the cached weekly trending results
    trending_key = make_trending_cache_key("week", None, strategy.version)
    track_ids = []
    old_trending = get_json_cached_key(redis_instance, trending_key)
    if old_trending:
        # The cached trending payload is a tuple; track ids are its second element
        track_ids = old_trending[1]
    exclude_track_ids = track_ids[:qr]

    # Get followers
    follower_query = (
        session.query(
            Follow.followee_user_id.label("user_id"),
            User.is_verified.label("is_verified"),
            func.count(Follow.followee_user_id).label("follower_count"),
        )
        .join(User, User.user_id == Follow.followee_user_id)
        .filter(
            Follow.is_current == True,
            Follow.is_delete == False,
            User.is_current == True,
            Follow.created_at < (datetime.now() - timedelta(days=f)),
        )
        .group_by(Follow.followee_user_id, User.is_verified)
    ).subquery()

    base_query = (
        session.query(
            AggregatePlays.play_item_id.label("track_id"),
            follower_query.c.user_id,
            follower_query.c.follower_count,
            AggregatePlays.count,
            Track.created_at,
            follower_query.c.is_verified,
        )
        .join(Track, Track.track_id == AggregatePlays.play_item_id)
        .join(follower_query, follower_query.c.user_id == Track.owner_id)
        .join(AggregateUser, AggregateUser.user_id == Track.owner_id)
        .filter(
            Track.is_current == True,
            Track.is_delete == False,
            Track.is_unlisted == False,
            Track.stem_of == None,
            Track.track_id.notin_(exclude_track_ids),
            Track.created_at >= (datetime.now() - timedelta(days=o)),
            follower_query.c.follower_count < S,
            follower_query.c.follower_count >= pt,
            AggregateUser.following_count < r,
            AggregatePlays.count >= q,
        )
    ).all()

    # Each record is (track_id, owner_id, owner_follower_count, listens,
    # created_at, owner_verified), matching the select list in base_query
    tracks_map = {
        record[0]: {
            "track_id": record[0],
            "created_at": record[4].isoformat(timespec="seconds"),
            "owner_id": record[1],
            "windowed_save_count": 0,
            "save_count": 0,
            "repost_count": 0,
            "windowed_repost_count": 0,
            "owner_follower_count": record[2],
            "karma": 1,
            "listens": record[3],
            "owner_verified": record[5],
        }
        for record in base_query
    }

    track_ids = [record[0] for record in base_query]

    # Get all the extra values
    repost_counts = get_repost_counts(
        session, False, False, track_ids, [RepostType.track]
    )

    windowed_repost_counts = get_repost_counts(
        session, False, False, track_ids, [RepostType.track], None, "week"
    )

    save_counts = get_save_counts(session, False, False, track_ids, [SaveType.track])

    windowed_save_counts = get_save_counts(
        session, False, False, track_ids, [SaveType.track], None, "week"
    )

    karma_scores = get_karma(session, tuple(track_ids), strategy, None, False, xf)

    # Associate all the extra data
    for (track_id, repost_count) in repost_counts:
        tracks_map[track_id]["repost_count"] = repost_count
    for (track_id, repost_count) in windowed_repost_counts:
        tracks_map[track_id]["windowed_repost_count"] = repost_count
    for (track_id, save_count) in save_counts:
        tracks_map[track_id]["save_count"] = save_count
    for (track_id, save_count) in windowed_save_counts:
        tracks_map[track_id]["windowed_save_count"] = save_count
    for (track_id, karma) in karma_scores:
        tracks_map[track_id]["karma"] = karma

    return list(tracks_map.values())
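
A hypothetical invocation of the function above, assuming a trending strategy object whose get_score_params() returns the keys unpacked at the top, plus the app's scoped session and redis client:

# Hypothetical usage; db, redis_instance, and strategy come from app context
with db.scoped_session() as session:
    scorable_tracks = get_scorable_track_data(session, redis_instance, strategy)
    for track in scorable_tracks:
        # Each entry has the shape documented in the docstring
        print(track["track_id"], track["listens"], track["karma"])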
Example #7
def test_json_cache_date_value(redis_mock):
    date = datetime(2016, 2, 18, 9, 50, 20)
    set_json_cached_key(redis_mock, "key", {"date": date})
    result = get_json_cached_key(redis_mock, "key")
    assert parser.parse(result["date"]) == date
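
This test passes because JSON has no native datetime type: the setter is expected to serialize the datetime to a string (for example via default=str), and the reader recovers it with dateutil's parser. A standalone sketch of the round trip:

import json
from datetime import datetime

from dateutil import parser

date = datetime(2016, 2, 18, 9, 50, 20)
# default=str renders the datetime as "2016-02-18 09:50:20"
serialized = json.dumps({"date": date}, default=str)
result = json.loads(serialized)
assert parser.parse(result["date"]) == date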
Example #8
def index_listen_count_milestones(db: SessionManager, redis: Redis):
    logger.info(
        "index_listen_count_milestones.py | Start calculating listen count milestones"
    )
    latest_plays_slot = redis.get(latest_sol_plays_slot_key)
    job_start = time.time()
    with db.scoped_session() as session:
        # Pull the current play indexing slot number from redis
        current_play_indexing = get_json_cached_key(redis, CURRENT_PLAY_INDEXING)
        if not current_play_indexing or current_play_indexing["slot"] is None:
            return

        # Pull the track ids to check from redis
        check_track_ids = get_track_listen_ids(redis)
        existing_milestone = (
            session.query(Milestone.id, func.max(Milestone.threshold))
            .filter(
                Milestone.name == LISTEN_COUNT_MILESTONE,
                Milestone.id.in_(check_track_ids),
            )
            .group_by(Milestone.id)
            .all()
        )

        aggregate_play_counts = (
            session.query(
                AggregatePlays.play_item_id,
                AggregatePlays.count,
            )
            .filter(AggregatePlays.play_item_id.in_(check_track_ids))
            .all()
        )

        milestones = dict(existing_milestone)
        play_counts = dict(aggregate_play_counts)

        # Bulk fetch track's next milestone threshold
        listen_milestones = []
        for track_id in check_track_ids:
            current_milestone = None
            if track_id not in play_counts:
                continue
            if track_id in milestones:
                current_milestone = milestones[track_id]
            next_milestone_threshold = get_next_track_milestone(
                play_counts[track_id], current_milestone
            )
            if next_milestone_threshold:
                listen_milestones.append(
                    Milestone(
                        id=track_id,
                        threshold=next_milestone_threshold,
                        name=LISTEN_COUNT_MILESTONE,
                        slot=current_play_indexing["slot"],
                        timestamp=datetime.utcfromtimestamp(
                            int(current_play_indexing["timestamp"])
                        ),
                    )
                )

        if listen_milestones:
            session.bulk_save_objects(listen_milestones)

        redis.set(PROCESSED_LISTEN_MILESTONE, current_play_indexing["slot"])
        if check_track_ids:
            redis.srem(TRACK_LISTEN_IDS, *check_track_ids)

    job_end = time.time()
    job_total = job_end - job_start
    logger.info(
        f"index_listen_count_milestones.py | Finished calculating trending in {job_total} seconds",
        extra={
            "job": "index_listen_count_milestones",
            "total_time": job_total
        },
    )
    if latest_plays_slot is not None:
        redis.set(latest_sol_listen_count_milestones_slot_key, int(latest_plays_slot))
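
get_next_track_milestone (used above) picks the next threshold a track has crossed but not yet been awarded. A plausible sketch of it, assuming a fixed ladder of listen-count thresholds (both the ladder values and the exact behavior are assumptions here, not the project's implementation):

# Hypothetical milestone ladder; the real thresholds live in the source
LISTEN_MILESTONES = [10, 25, 50, 100, 250, 500, 1000]


def get_next_track_milestone_sketch(play_count, current_milestone=None):
    # Return the highest threshold at or below play_count that exceeds the
    # already-recorded milestone, or None if there is nothing new to award
    crossed = [m for m in LISTEN_MILESTONES if m <= play_count]
    if not crossed:
        return None
    highest = crossed[-1]
    if current_milestone is not None and highest <= current_milestone:
        return None
    return highest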