Example #1
def test():
    top_users = redis.zrevrange("leaderboard", 0, 9, withscores=True)
    a = [{
        "user": json.loads(redis.smembers(user).pop().decode('utf-8')),
        "score": score
    } for user, score in top_users]
    return json.dumps(a)
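A minimal setup under which test() runs (assumed key names and a local Redis instance; not from the original project):

import json
import redis as redis_lib

redis = redis_lib.Redis()  # assumed local instance, binary responses
redis.zadd("leaderboard", {"user:1": 150, "user:2": 90})
redis.sadd("user:1", json.dumps({"name": "alice"}))
redis.sadd("user:2", json.dumps({"name": "bob"}))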
Example #2
def index():
    redis.zincrby("counters", 1, hostname)
    counters = redis.zrevrange("counters", 0, -1, withscores=True)
    counters = [ (s.decode(), int(i)) for (s,i) in counters ]
    thiscount = int(redis.zscore("counters", hostname))
    totalcount = sum(i for (s,i) in counters)
    return render_template( "index.html",
        hostname=hostname, counters=counters,
        thiscount=thiscount, totalcount=totalcount)
Example #3
def index():
    redis.zincrby("counters", hostname)
    counters = redis.zrevrange("counters", 0, -1, withscores=True)
    counters = [ (s.decode(), int(i)) for (s,i) in counters ]
    thiscount = int(redis.zscore("counters", hostname))
    totalcount = sum(i for (s,i) in counters)
    return render_template( "index.html",
        hostname=hostname, counters=counters,
        thiscount=thiscount, totalcount=totalcount)
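Examples #2 and #3 differ only in the zincrby call: redis-py 3.0 swapped the argument order, putting the increment amount before the member, while 2.x took the member first with the amount defaulting to 1.

# redis-py >= 3.0: zincrby(name, amount, value) -- Example #2
redis.zincrby("counters", 1, hostname)
# redis-py 2.x: zincrby(name, value, amount=1) -- Example #3
# redis.zincrby("counters", hostname)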
Example #4
def get_gold_week_win_rank(redis, groupid):
    first = first_day_of_week()
    last = first + timedelta(7)
    keys = []
    while first <= last:
        keys.append(GOLD_WIN_RANK_WITH_AGENT_ZSET_BYDAY % (groupid, first.strftime("%Y-%m-%d")))
        first += timedelta(1)
    redis.zunionstore('gold:win:rank:%s:thisweek:zset' % groupid, keys, aggregate='MAX')
    return redis.zrevrange('gold:win:rank:%s:thisweek:zset' % groupid, 0, 10 - 1, True)
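first_day_of_week() and the key template are not shown in the snippet; one plausible shape, purely as a hypothetical reconstruction:

from datetime import date, timedelta

# Hypothetical -- the real definitions live elsewhere in the project:
GOLD_WIN_RANK_WITH_AGENT_ZSET_BYDAY = 'gold:win:rank:%s:%s:zset'

def first_day_of_week(today=None):
    today = today or date.today()
    return today - timedelta(days=today.weekday())  # Monday of this week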
Example #5
def recent_gists(start=None):
    start = 0 if start is None else start
    end = start + 30  # start can no longer be None at this point

    pipe = redis.pipeline()
    for id in redis.zrevrange('index', start, end):
        pipe.hmget("gist:#" + str(id), 'payload')

    # hmget returns a one-element list per gist; drop missing payloads
    return map(lambda x: json.loads(x[0]),
               filter(lambda x: x[0] is not None, pipe.execute()))
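A usage sketch, assuming a client built with decode_responses=True so sorted-set members come back as str (otherwise str(id) on bytes would mangle the key under Python 3), and the 'gist:#<id>' hash schema implied above:

import json, time
import redis as redis_lib

redis = redis_lib.Redis(decode_responses=True)
redis.zadd('index', {'42': time.time()})
redis.hset('gist:#42', 'payload', json.dumps({'title': 'hello'}))
gists = list(recent_gists())  # map() is lazy on Python 3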
Example #6
def handler(event, context):
    result = redis.zrevrange('Rating', 0, -1, True)
    flat_list = [item for sublist in result for item in sublist]
    ret = []
    for i in range(0, len(flat_list)):
        if ( i % 2 == 0 ):
            org = json.loads(flat_list[i])
            org['Score'] = int(flat_list[i+1])
            ret.append(org)
            
    return ret
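Since withscores=True already yields (member, score) pairs, the flatten-and-reindex loop above can be written directly; an equivalent sketch:

def handler(event, context):
    ret = []
    for member, score in redis.zrevrange('Rating', 0, -1, withscores=True):
        org = json.loads(member)   # each member is a JSON-encoded org
        org['Score'] = int(score)
        ret.append(org)
    return ret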
Example #8
    def get(self):
        redis = datastore.get_datastore()
        finished = request.args.get('finished')

        start = 0
        end = -1
        if 'offset' in request.args:
            start = int(request.args.get('offset'))
        if 'limit' in request.args:
            end = start + int(request.args.get('limit')) - 1

        cAll = redis.zcount('sortet-activities:all', '-inf', '+inf')
        cFinished = redis.zcount('sortet-activities:finished', '-inf', '+inf')
        cNotFinished = cAll - cFinished

        if (finished == 'true'):
            actKeys = redis.zrevrange('sortet-activities:finished', start, end)
            total = cFinished
        else:
            actKeys = redis.zrevrange('sortet-activities:all', start, end)
            total = cAll

        allActivities = []
        for k in actKeys:
            activityencoded = redis.hget('activitieshash', k)
            if activityencoded is not None:
                activity = pickle.loads(activityencoded)
                activity['url'] = request.url + "/" + str(activity['id'])
                allActivities.append(activity)

        limit = end - start + 1
        resultset = {
            "count": len(actKeys),
            "total": total,
            "offset": start,
            "limit": limit,
            "pending": cNotFinished
        }
        fullresponse = {"metadata": resultset, "content": allActivities}
        return fullresponse, 200
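The offset/limit handling above maps HTTP paging parameters onto ZREVRANGE indexes, which are inclusive on both ends; in isolation:

offset, limit = 20, 10
start = offset
end = start + limit - 1  # both indexes inclusive
# redis.zrevrange('sortet-activities:all', start, end) -> items 20..29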
Example #9
    def __iter__(self):
        redis = get_redis_connection()

        postfix = ""
        if self.event_type is not None:
            postfix += ":%s" % (self.event_type.slug)
        lookup_keys = [
            obj.lookup_key() + postfix
            for obj in self.objs
        ]

        if len(lookup_keys) >= 2:
            s = hashlib.sha1()
            for lookup_key in lookup_keys:
                s.update(lookup_key)
            key = s.hexdigest()
            redis.zunionstore(key, lookup_keys, aggregate="MIN")
            # Expire it in 5 minutes, enough that paginating shouldn't require
            # a recompute, but short enough to not clutter the place up.
            redis.expire(key, 60 * 5)
        elif len(lookup_keys) == 1:
            key = lookup_keys[0]
        else:
            assert not self.event_type
            key = "ALL_EVENTS"

        statuses = defaultdict(lambda: Status(0, 0))
        items = list(redis.zrevrange(key, self.offset, self.limit, withscores=True))
        parsed_items = []
        context_items = {}
        for cluster, score in items:
            data = json.loads(cluster)
            parsed_items.append((data, score))
            for o in data["items"]:
                status_key = self._status_key(data["slug"], o)
                status = statuses[status_key]
                if o["remove"]:
                    statuses[status_key] = status._replace(removes=status.removes+1)
                else:
                    statuses[status_key] = status._replace(adds=status.adds+1)
                for key, val in o["context"].iteritems():
                    field = EventType.registry[data["slug"]].context_shape[key]
                    key = field.unique_key()
                    if key not in context_items:
                        context_items[key] = RawResults(field, set())
                    context_items[key].vals.add(val)

        final_context_items = {}
        for key, (field, vals) in context_items.iteritems():
            final_context_items[key] = field.deserialize_bulk(vals)

        for data, score in parsed_items:
            cluster_items = []
            timestamp = datetime.fromtimestamp(score)
            for o in data["items"]:
                item = self._convert_item(
                    data["slug"], o, timestamp, statuses, final_context_items, data["cluster_id"]
                )
                if item is not None:
                    cluster_items.append(item)
            if cluster_items:
                clustered_on = None
                if data["clustered_on"] is not None:
                    clustered_on = cluster_items[0].context[data["clustered_on"]]
                yield StreamCluster(
                    data["slug"],
                    timestamp,
                    cluster_items,
                    clustered_on,
                    data["cluster_id"]
                )
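This snippet is Python 2 (iteritems, sha1 over str). The cached-union trick at its core, as a Python 3 sketch:

import hashlib

def cached_union(redis, lookup_keys, ttl=300):
    s = hashlib.sha1()
    for k in lookup_keys:
        s.update(k.encode('utf-8'))  # sha1 needs bytes on Python 3
    key = s.hexdigest()              # deterministic, so pages reuse the union
    redis.zunionstore(key, lookup_keys, aggregate='MIN')
    redis.expire(key, ttl)           # short-lived cache, as in the comment above
    return key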
Example #10
def partitions():
    redis = get_redis()

    start = request.args.get('start')
    end = request.args.get('end')

    key = current_app.config['KEY_PREFIX'] + 'PT' + str(
        current_app.config['PARTITION']) + 'M'

    first = redis.zrange(key, 0, 0)
    last = redis.zrevrange(key, 0, 0)

    prefix_len = len(current_app.config['KEY_PREFIX'])

    if len(first) == 0:
        return jsonify({})

    first = first[0].decode('utf-8')
    first_datetime = first[prefix_len:first.rfind('PT')]
    last = last[0].decode('utf-8')
    last_datetime = last[prefix_len:last.rfind('PT')]

    partition_info = {
        'duration': 'PT' + str(current_app.config['PARTITION']) + 'M',
        'first': {
            'at': first_datetime,
            'partition': first
        },
        'last': {
            'at': last_datetime,
            'partition': last
        }
    }
    if start is None and end is None:
        return jsonify(partition_info)

    if start is None:
        start = first_datetime

    if start.find('T') < 0:
        start += 'T00:00:00'

    if end is not None and end.find('T') < 0:
        end += 'T23:59:59'

    start = datetime.fromisoformat(start)

    if end is None:
        timestamp = datetime.utcnow()
        partition_no = timestamp.minute // current_app.config['PARTITION']
        end = datetime(timestamp.year,
                       timestamp.month,
                       timestamp.day,
                       timestamp.hour,
                       partition_no * current_app.config['PARTITION'],
                       tzinfo=timestamp.tzinfo)
    else:
        end = datetime.fromisoformat(end)

    start_score = datetime_score(start)
    end_score = datetime_score(end)

    partitions = list(
        map(lambda v: v.decode('utf-8')[prefix_len:],
            redis.zrangebyscore(key, start_score, end_score)))

    partition_info['partitions'] = partitions
    return jsonify(partition_info)
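The slicing above implies partition keys of the form '<KEY_PREFIX><ISO datetime>PT<n>M'. datetime_score is not shown; one plausible (assumed) definition scores by Unix timestamp:

# Hypothetical -- the project's real datetime_score is not in the snippet:
def datetime_score(dt):
    return dt.timestamp()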
Example #11
try:
    nowTime = datetime.now()
    nowHour = nowTime.strftime("%H:%M")
    nowDay = nowTime.strftime('%Y-%m-%d')
    addLog('on refresh time: %s %s' % (nowDay, nowHour))
    if nowHour in REFRESH_TIMES:
        time.sleep(WAIT_SLEEP_TIME)  # wait for the DB writes to finish
        lastRank = RANK_COUNT - 1
        if nowHour == '00:00':
            yesterday = date.today() - timedelta(days=1)
            table = FORMAT_USER_COINDELTA_TABLE % (yesterday)
        else:
            table = FORMAT_USER_COINDELTA_TABLE % (nowDay)
        redis.delete(TMP_FORMAT_USER_COINDELTA_TABLE)
        addLog('table:%s' % (table))
        lastPlayers = redis.zrevrange(table, lastRank, lastRank, True, int)
        if lastPlayers:
            lastPlayers = redis.zrevrangebyscore(table,
                                                 lastPlayers[0][1],
                                                 lastPlayers[0][1],
                                                 score_cast_func=int)
            lastRank = redis.zrevrank(
                table, lastPlayers[0]) + len(lastPlayers) - 1
        players = redis.zrevrange(table, 0, lastRank, True, int)
        # check whether lower-ranked players still exist; if not, filter out players with zero profit
        zeroPlayers = redis.zrevrangebyscore(table,
                                             0,
                                             0,
                                             score_cast_func=int)
        if zeroPlayers:
            less0Rank = redis.zrevrank(table, zeroPlayers[-1])
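The core idea above (extending the cutoff rank across tied scores so players with the same score are not dropped arbitrarily) in isolation, as a sketch:

def top_players_with_ties(redis, table, rank_count):
    # Player currently sitting at the cutoff rank, scores cast to int
    last = redis.zrevrange(table, rank_count - 1, rank_count - 1, True, int)
    if not last:
        return redis.zrevrange(table, 0, rank_count - 1, True, int)
    score = last[0][1]
    # Everyone sharing the cutoff score
    tied = redis.zrevrangebyscore(table, score, score, score_cast_func=int)
    last_rank = redis.zrevrank(table, tied[0]) + len(tied) - 1
    return redis.zrevrange(table, 0, last_rank, True, int)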
Example #13
print("--- %s seconds ---" % (time.time() - start_time))
start_time = time.time()

print("\nUsing sorted set with pipeline")
#for line in page:
for line in page.iter_lines():
    if line != '' and line != ' ':
        for word in line.split():
            redis_pipe.zincrby('bm_text', 1, word)

responses = redis_pipe.execute()
for response in responses:
    pass

real_results = redis.zrevrange('bm_text', 0, 49)
print("--- %s seconds ---" % (time.time() - start_time))
print('Memory used %s' % redis.memory_usage('bm_text'))
print('This is an accurate list for comparison')
print(redis.zcount('bm_text', '-inf', '+inf'))

# test Top-K
print("K Width(*k) Depth Memory Accuracy Time")
k_list = [10, 50, 100, 1000]
for k in k_list:
    real_results = redis.zrevrange('bm_text', 0, k - 1)
    for width in [4, 8]:
        for depth in [3, 7, 10]:
            redis.execute_command('DEL', 'bm_topk')
            start_time = time.time()
            create_topk(redis, k, width, depth)
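create_topk is not shown in the snippet; assuming it wraps RedisBloom's TOPK.RESERVE (a guess consistent with the 'Width(*k)' column in the header above), it could look like:

def create_topk(r, k, width, depth, decay=0.9):
    # TOPK.RESERVE key topk width depth decay (RedisBloom module command)
    r.execute_command('TOPK.RESERVE', 'bm_topk', k, width * k, depth, decay)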