def GET(self):
    r = Redis(connection_pool=redis_pool)
    count = 0
    for key in r.hgetall(URL_HASH_NAME):
        count += 1
    return render_template('index.html', count=count, section_class='index',
                           user_id=None)
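If only the field count is needed, fetching the whole hash with hgetall is wasteful; HLEN returns the count server-side. A minimal sketch of that alternative, reusing redis_pool and URL_HASH_NAME from the example above (the helper name count_urls is hypothetical):

from redis import Redis

def count_urls():
    # HLEN is O(1) on the server and transfers a single integer,
    # instead of every key/value pair in the hash.
    r = Redis(connection_pool=redis_pool)
    return r.hlen(URL_HASH_NAME)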
def hgetall(self, name):
    """
    Return a Python dict of the hash's name/value pairs,
    with both keys and values decoded.
    """
    output = {}
    info = Redis.hgetall(self, name)
    # items() instead of the Python 2-only iteritems()
    for key, value in info.items():
        output[key.decode(self.encoding)] = value.decode(self.encoding)
    return output
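Subclassing just to decode is unnecessary in current redis-py, which accepts decode_responses=True and applies the client's encoding to every returned key and value. A minimal sketch under that assumption (the sample hash and fields are hypothetical):

from redis import Redis

r = Redis(decode_responses=True, encoding='utf-8')
r.hset('user:1', mapping={'name': 'Ada', 'lang': 'en'})
print(r.hgetall('user:1'))  # {'name': 'Ada', 'lang': 'en'} -- str, not bytes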
def GET(self):
    r = Redis(connection_pool=redis_pool)
    user_id = check_token(r)
    all_keys = r.hgetall(URL_HASH_NAME)
    url_list = []
    for key in all_keys:
        url_list.append(
            (
                # Strip the scheme for display
                all_keys[key].replace('http://', '').replace('https://', ''),
                key,
                r.hget(COUNT_HASH_NAME, key) or 0,  # click count, default 0
                r.hget(LOG_HASH_NAME, key) or '',   # access log, default empty
            )
        )
    return render_template('list.html', user_id=user_id, list=url_list,
                           is_all=True)
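Each loop iteration above issues two blocking HGET round trips, so the view costs 2N calls for N URLs. A pipeline collapses them into one round trip; a sketch under the same names (build_url_list is a hypothetical helper, and the pairing of results to keys relies on dicts iterating in a stable insertion order, i.e. Python 3.7+):

def build_url_list(r):
    all_keys = r.hgetall(URL_HASH_NAME)
    pipe = r.pipeline()
    for key in all_keys:
        pipe.hget(COUNT_HASH_NAME, key)  # click count
        pipe.hget(LOG_HASH_NAME, key)    # access log
    results = iter(pipe.execute())       # one round trip for all HGETs
    return [
        (all_keys[key].replace('http://', '').replace('https://', ''),
         key,
         next(results) or 0,
         next(results) or '')
        for key in all_keys
    ]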
def index(request):
    count = int(request.GET.get('count', 0))
    since_id = request.GET.get('since_id', None)
    format = request.GET.get('format', "html")
    userid = request.GET.get('userid', "1")
    client = Redis(decode_responses=True)
    ru = client.hgetall("user_" + userid)
    l = []
    userdata = {}
    p = client.pipeline()
    # The user hash stores its tweet ids as a stringified list, e.g. "[3, 2, 1]".
    tweetidlist = ru['tweets'][1:-1].replace(" ", "").split(",")
    tweetCount = len(tweetidlist)
    # Page through the timeline: start at since_id, take at most `count` items.
    if since_id:
        tweetidlist = tweetidlist[tweetidlist.index(since_id):]
    if count != 0 and count < len(tweetidlist):
        tweetidlist = tweetidlist[:count]
    # Batch the per-tweet HGETALLs into a single round trip.
    tweetwords = ["tweet_" + s for s in tweetidlist]
    for tweetword in tweetwords:
        p.hgetall(tweetword)
    tcount = 0
    for t in p.execute():
        # Cache each author's user hash so it is fetched only once.
        if t["user"] not in userdata:
            userdata[t["user"]] = client.hgetall("user_" + t["user"])
        data = {}
        data["tweet"] = t
        data["tweetid"] = tweetidlist[tcount]
        data["username"] = userdata[t["user"]]["name"]
        data["refCount"] = 20
        if t["user"] == userid and count != 0:
            data["refCount"] = count
        l.append(data)
        tcount = tcount + 1
    userinfo = {"name": ru["name"], "userid": userid,
                "count": count, "nextStart": tweetidlist[-1]}
    userinfo["tweetCount"] = tweetCount
    userinfo["follows"] = 4
    template = loader.get_template('index.html')
    context = {
        'user': userinfo,
        'tweets': l
    }
    if format.lower() == "json":
        return JsonResponse(context, safe=False)
    return HttpResponse(template.render(context, request))
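The view above assumes a specific hash layout: each user_<id> hash stores a stringified list of tweet ids, and each tweet_<id> hash stores the text plus the author's id. A hedged sketch of seed data matching that layout (all keys and values here are hypothetical examples):

from redis import Redis

client = Redis(decode_responses=True)
client.hset('user_1', mapping={'name': 'alice', 'tweets': '[3, 2, 1]'})
client.hset('tweet_3', mapping={'text': 'newest tweet', 'user': '1'})
# ru['tweets'][1:-1].replace(" ", "").split(",") then yields ['3', '2', '1'],
# newest id first, which is why nextStart is the last (oldest) id on the page.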
db = Redis(  # leading arguments (host, port, ...) truncated in the excerpt
    db=redis_db, password=redis_password)

# Wait for redis connection
while True:
    try:
        db.ping()
    except ConnectionError:  # redis.exceptions.ConnectionError
        logging.info("Waiting for connection to redis database...")
        sleep(1)
    else:
        logging.info("SUCCESS: Connected to Redis database.")
        break

# Kick dead sessions (left over from the socket.io reload) out of rooms
for room_token, room in db.hgetall('rooms').items():  # For all rooms in db
    room = json.loads(room)  # Deserialize room data
    room['count'] = 0  # Reset connections to 0: nobody can still be connected
    db.hset('rooms', room_token, json.dumps(room))
    logging.info("Kicked dead sessions out of room {}".format(room_token))

# Create a cleanup daemon that deletes abandoned rooms from the database
# Get params
cleanup_interval = int(os.getenv('CLEANUP_INTERVAL', 60 * 15))
cleanup_room_expire_time = int(os.getenv('CLEANUP_ROOM_EXPIRE_TIME', 60 * 60))
cleanup_max_room_life_time = int(
    os.getenv('CLEANUP_MAX_ROOM_LIFE_TIME', 60 * 60 * 24))

# Define function that cleans abandoned rooms in an interval
def room_cleanup():
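The excerpt cuts off at the definition of room_cleanup, so its body is not shown. A hedged sketch of what such an interval cleanup could look like, reusing db, json, logging, and the cleanup_* parameters from above; the 'last_seen' and 'created' room fields are assumptions, not from the source:

import threading
import time

def room_cleanup():
    while True:
        now = time.time()
        for room_token, raw in db.hgetall('rooms').items():
            room = json.loads(raw)
            idle = now - room.get('last_seen', now)
            age = now - room.get('created', now)
            # Drop rooms that are empty and expired, or past the max life time
            if (room['count'] == 0 and idle > cleanup_room_expire_time) \
                    or age > cleanup_max_room_life_time:
                db.hdel('rooms', room_token)
                logging.info("Deleted abandoned room {}".format(room_token))
        time.sleep(cleanup_interval)

threading.Thread(target=room_cleanup, daemon=True).start()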