def fetch_history_from_redis(room_id, num_msgs=None, post_delete=False):
    """Fetch the most recent messages for a room from its Redis history list.

    Args:
        room_id: Room identifier; history is stored under ``HISTORY_<room_id>``.
        num_msgs: Number of messages to read from the head of the list.
            ``None`` reads the whole list; a non-positive value returns [].
        post_delete: When True, delete the history key after reading it.

    Returns:
        list: Decoded message dicts, or [] on any error (errors are printed,
        not raised — best-effort semantics).
    """
    try:
        room_name = room_id
        redis_connection = cache.get_client('')
        # Hoist the namespaced key: it was being recomputed on every call.
        history_key = cache.make_key(f'HISTORY_{room_name}')
        if num_msgs is None:
            history_bytes = redis_connection.lrange(history_key, 0, -1)
        elif num_msgs <= 0:
            return []
        else:
            history_bytes = redis_connection.lrange(history_key, 0, num_msgs - 1)
        # history is now a Python list of dicts
        history = [json.loads(msg) for msg in history_bytes]
        if post_delete:
            redis_connection.delete(history_key)
        print("in clientwidget_updated fetch_history_from_redis: ", room_name, history)
        return history
    except Exception as ex:
        # Deliberate best-effort: report the failure and return empty history.
        print(ex)
        return []
def append_msg_to_redis(room_name, message_dict, store_full=False, timeout=24 * 60 * 60):
    """Append a websocket message to the room's Redis history list.

    Args:
        room_name: Room identifier; history lives under ``HISTORY_<room_name>``.
        message_dict: Message payload. If it carries a 'message' key holding a
            list (and ``store_full`` is False), each element is stored as a
            separate history entry; otherwise the whole dict is stored once.
        store_full: When True, always store the complete dict as one entry.
        timeout: TTL in seconds applied to the history key and the room lock.

    The original body repeated the rpush/expire/lock sequence in three
    branches; this version selects the payloads first and runs the shared
    tail once.
    """
    redis_connection = cache.get_client('')
    history_key = cache.make_key(f'HISTORY_{room_name}')
    if not store_full and isinstance(message_dict.get('message'), list):
        # If we want to store an array of parsed messages
        payloads = message_dict['message']
    else:
        payloads = [message_dict]
    for payload in payloads:
        redis_connection.rpush(history_key, json.dumps(payload))
    redis_connection.expire(history_key, timeout)
    # Finally set a lock on this room name. We'll need it later for flushing to DB
    cache.set(f'CLIENTWIDGETLOCK_{room_name}', room_name, timeout=timeout)
def cleanup_room_redis(room_name, reset_count=False, bot_type="website"):
    """Dumps the session content of the room into the DB """
    connection = cache.get_client('')
    # Per-room value cached under the room name; used below as the DB alias
    # for save(). NOTE(review): confirm what callers store under this key.
    ext = cache.get(str(room_name), "default")
    # Get the room lock status from the cache
    lock = cache.get(f'CLIENTWIDGETROOMLOCK_{room_name}')
    # Website rooms are only flushed when a lock exists; other bot types
    # fall through to the unconditional flush branch below.
    if lock is None and bot_type == "website":
        return
    if lock == True or bot_type in (
            "whatsapp",
            "facebook",
    ):
        # Dump to DB
        variables = cache.get("VARIABLES_" + room_name)
        messages_bytes = connection.lrange(
            cache.make_key("HISTORY_" + room_name), 0, -1)
        messages = list(json.loads(message) for message in messages_bytes)
        modified = False
        with transaction.atomic():
            try:
                # Prefer lookup by UUID room_id; fall back to plain room_name
                # when the name is not a valid UUID string.
                room_id = uuid.UUID(str(room_name))
                instance = ChatRoom.objects.get(room_id=room_id)
            except ValueError:
                instance = ChatRoom.objects.get(room_name=room_name)
            if variables is not None:
                instance.variables = variables
                modified = True
            if messages is not None:
                if messages != []:
                    instance.messages.extend(messages)
                    # Website bots additionally mirror into recent_messages
                    # when the model exposes such a list attribute.
                    if bot_type == 'website' and hasattr(
                            instance, 'recent_messages') and isinstance(
                                getattr(instance, 'recent_messages'), list):
                        instance.recent_messages.extend(messages)
                    modified = True
            if modified:
                # Mark the session ended before persisting.
                instance.bot_is_active = False
                instance.end_time = timezone.now()
                instance.save(using=ext)
    if reset_count == True:
        # Reset the count to 0
        cache.set(f"NUM_USERS_{room_name}", 0)
    # Delete the locks
    cache.delete(f'CLIENTWIDGETROOMLOCK_{room_name}')
    cache.delete(f'CLIENTWIDGETLOCK_{room_name}')
    # Delete the session history
    cache.delete(f"HISTORY_{room_name}")
    cache.delete(f"VARIABLES_{room_name}")
def update_session_redis(room_name, msg_number, content):
    """Write the key-value fields of one message to the Redis store."""
    client = cache.get_client('')
    # Per-message hash keyed by "<room>_<msg_number>".
    client.hmset(room_name + "_" + str(msg_number), content)
    # Also update the history
    # TODO: Store it as a single nested hash value
    history_key = cache.make_key(f"HISTORY_{room_name}_{msg_number % (N)}")
    client.hmset(history_key, content)
def flush_session(room_name, batch_size):
    """Delete every per-message Redis key belonging to this session."""
    # Flush the contents of the redis cache for this session
    client = cache.get_client('')
    pattern = cache.make_key(f"{room_name}_*")
    batches = fetch_redis_batch(client.scan_iter(pattern), batch_size)
    for batch in batches:
        for key in batch:
            # A None entry marks the end of a partially-filled batch.
            if key is None:
                break
            client.delete(key)
def delete_history_from_redis(room_name, num_msgs=None):
    """Remove messages from the room's Redis history list.

    ``num_msgs=None`` deletes the whole history key; a positive value trims
    away the first ``num_msgs`` entries. Returns a ``(result, None)`` tuple.
    """
    client = cache.get_client('')
    history_key = cache.make_key(f'HISTORY_{room_name}')
    if num_msgs is None:
        res = client.delete(history_key)
    elif num_msgs <= 0:
        # Nothing to delete; report success without touching Redis.
        return 1, None
    else:
        res = client.ltrim(history_key, num_msgs, -1)
    return res, None
def atomic_get(key):
    """ Atomically gets the most recent {key : value} pair from the redis store """
    # Returns (value, dirty): dirty is True when the watched key changed
    # mid-transaction or when the pipeline path was unavailable.
    REDIS_CONNECTION = cache.get_client('')
    try:
        with REDIS_CONNECTION.pipeline() as pipe:
            try:
                # WATCH/MULTI makes the GET part of a checked transaction:
                # execute() raises WatchError if another client touched key.
                pipe.watch(key)
                pipe.multi()
                pipe.get(key)
                return pipe.execute()[-1], False
            except WatchError:
                # Key changed under us; fall back to a plain read.
                return pipe.get(key), True
    except TypeError:
        # NOTE(review): presumably raised when the cache client does not
        # support pipeline() — confirm which backend triggers this path.
        return REDIS_CONNECTION.get(key), True
def get_anchor(self, request, **__):
    """Return redirect-target URLs whose Redis keys match the ``q`` param.

    Scans for ``*redirects:v4*<q>*`` keys and extracts the http URL portion
    of each match. Degrades to an empty result when Redis is unavailable.
    """
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)
    query = request.GET.get('q', '')
    try:
        redis_client = cache.get_client(None)
        redis_data = redis_client.keys("*redirects:v4*%s*" % query)
    except (AttributeError, redis.exceptions.ConnectionError):
        # Redis down or client missing: serve an empty object list.
        redis_data = []
    # -2 because http:
    # Key layout puts the URL in the colon-separated fields from index 6 on.
    urls = [''.join(data.split(':')[6:])
            for data in redis_data if 'http://' in data]
    object_list = {'objects': urls}
    self.log_throttled_access(request)
    return self.create_response(request, object_list)
def get_anchor(self, request, **kwargs):
    """Look up redirect URLs in Redis that match the ``q`` query parameter."""
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)
    query = request.GET.get('q', '')
    try:
        client = cache.get_client(None)
        matched_keys = client.keys("*redirects:v4*%s*" % query)
    except (AttributeError, redis.exceptions.ConnectionError):
        matched_keys = []
    # -2 because http:
    urls = []
    for entry in matched_keys:
        if 'http://' in entry:
            urls.append(''.join(entry.split(':')[6:]))
    self.log_throttled_access(request)
    return self.create_response(request, {'objects': urls})
def atomic_set(key, value, timeout=24 * 60 * 60):
    """ Atomically sets {key: value} on the redis store """
    # Returns (stored_value, dirty): dirty is True when the watched key
    # changed mid-transaction or when the pipeline path was unavailable.
    REDIS_CONNECTION = cache.get_client('')
    try:
        with REDIS_CONNECTION.pipeline() as pipe:
            try:
                # WATCH/MULTI queues SET + EXPIRE + GET as one checked
                # transaction; execute() raises WatchError on interference.
                pipe.watch(key)
                pipe.multi()
                pipe.set(key, value)
                pipe.expire(key, timeout)
                pipe.get(key)
                return pipe.execute()[-1], False
            except WatchError:
                # Another client touched key; return the current value.
                return pipe.get(key), True
    except TypeError:
        # NOTE(review): presumably raised when the cache client does not
        # support pipeline() — confirm which backend triggers this path.
        return REDIS_CONNECTION.get(key), True
def index_theme_data(sender, **kwargs):
    """
    Keep track of which projects are using which theme.

    This is primarily used so we can send email to folks using alabaster,
    and other themes we might want to display ads on. This will allow us
    to give people fair warning before we put ads on their docs.
    """
    context = kwargs['context']
    project = context['project']
    theme = context['theme']
    try:
        redis_client = cache.get_client(None)
        redis_client.sadd("readthedocs:v1:index:themes:%s" % theme,
                          project.slug)
    except (AttributeError, redis.exceptions.ConnectionError):
        # Fix: the original passed a '%s' placeholder with no argument,
        # which makes the logging call itself fail to format. exc_info=True
        # already attaches the traceback.
        log.warning('Redis theme indexing error', exc_info=True)
def index_theme_data(sender, **kwargs):
    """
    Keep track of which projects are using which theme.

    This is primarily used so we can send email to folks using alabaster,
    and other themes we might want to display ads on. This will allow us
    to give people fair warning before we put ads on their docs.
    """
    del sender  # unused
    context = kwargs['context']
    project = context['project']
    theme = context['theme']
    try:
        redis_client = cache.get_client(None)
        redis_client.sadd("readthedocs:v1:index:themes:%s" % theme,
                          project.slug)
    except (AttributeError, redis.exceptions.ConnectionError):
        # Fix: the original passed a '%s' placeholder with no argument,
        # which makes the logging call itself fail to format. exc_info=True
        # already attaches the traceback.
        log.warning('Redis theme indexing error', exc_info=True)
def purge_version(version, mainsite=False, subdomain=False, cname=False):
    # Issues Varnish PURGE requests for a version's docs across every
    # configured VARNISH_SERVERS entry; each boolean flag selects which
    # hostnames (project subdomain, main site, custom CNAMEs) get purged.
    varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)
    h = Http()
    if varnish_servers:
        for server in varnish_servers:
            if subdomain:
                # Send a request to the Server, to purge the URL of the Host.
                host = "%s.readthedocs.org" % version.project.slug
                headers = {'Host': host}
                url = "/en/%s/*" % version.slug
                to_purge = "http://%s%s" % (server, url)
                log.info("Purging %s on %s", url, host)
                h.request(to_purge, method="PURGE", headers=headers)
            if mainsite:
                # Purge both the version subtree and the project root page
                # on the main site.
                headers = {'Host': "readthedocs.org"}
                url = "/docs/%s/en/%s/*" % (version.project.slug, version.slug)
                to_purge = "http://%s%s" % (server, url)
                log.info("Purging %s on readthedocs.org", url)
                h.request(to_purge, method="PURGE", headers=headers)
                root_url = "/docs/%s/" % version.project.slug
                to_purge = "http://%s%s" % (server, root_url)
                log.info("Purging %s on readthedocs.org", root_url)
                h.request(to_purge, method="PURGE", headers=headers)
            if cname:
                try:
                    # Purge every custom domain recorded for this project.
                    redis_client = cache.get_client(None)
                    for cnamed in redis_client.smembers('rtd_slug:v1:%s'
                                                        % version.project.slug):
                        headers = {'Host': cnamed}
                        url = "/en/%s/*" % version.slug
                        to_purge = "http://%s%s" % (server, url)
                        log.info("Purging %s on %s", url, cnamed)
                        h.request(to_purge, method="PURGE", headers=headers)
                        root_url = "/"
                        to_purge = "http://%s%s" % (server, root_url)
                        log.info("Purging %s on %s", root_url, cnamed)
                        h.request(to_purge, method="PURGE", headers=headers)
                except (AttributeError, redis.exceptions.ConnectionError):
                    # Best-effort: skip CNAME purging when Redis is down.
                    pass
def redis_server_cleanup():
    """Flush all live widget sessions to the DB and clear Redis/session state.

    Iterates every ``CLIENTWIDGETLOCK_`` key in batches, persisting each
    room's cached variables and message history into its ChatRoom row, then
    clears the bot-preview / user-count / history namespaces and deletes all
    stored Django sessions.
    """
    connection = cache.get_client('')
    generator = fetch_batch(connection, cache.make_key("CLIENTWIDGETLOCK_"))
    while True:
        # Clear up any existing sessions
        try:
            batch = next(generator)
            # Dump to DB
            for key in batch:
                room_name = json.loads(connection.get(key))
                variables = cache.get("VARIABLES_" + room_name)
                messages_bytes = connection.lrange(
                    cache.make_key("HISTORY_" + room_name), 0, -1)
                # NOTE(review): the decoded history is reversed before
                # persisting — presumably stored newest-first; confirm.
                messages = [json.loads(m) for m in messages_bytes][::-1]
                modified = False
                with transaction.atomic():
                    instance = ChatRoom.objects.get(room_name=room_name)
                    if variables is not None:
                        instance.variables = variables
                        modified = True
                    if messages:
                        instance.messages.extend(messages)
                        modified = True
                    if modified:
                        instance.save()
        except StopIteration:
            break
    clear_ns(connection, cache.make_key("BOT_PREVIEW_VARIABLE_"))
    clear_ns(connection, cache.make_key("NUM_USERS_"))
    clear_ns(connection, cache.make_key("HISTORY_"))
    # Clear all sessions. Hoisted loop-invariant: the session engine class
    # does not change between iterations. Also dropped the unused
    # `session_uid` local the original computed per session.
    SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
    for session in Session.objects.all():
        SessionStore(session_key=session.session_key).delete()
from django.utils.encoding import force_bytes, iri_to_uri

# Cookie names that participate in view-cache keys (project setting).
COOKIE_KEYS = getattr(settings, 'VIEWCACHE_COOKIE_KEYS', ())

# Time constants, in seconds.
MINUTE = 60
HOUR = MINUTE * 60
DAY = HOUR * 24

# Probe the configured query-cache Redis at import time; fall back to the
# default Django cache's client when it is unreachable.
redis_client = redis.StrictRedis.from_url(settings.QUERYCACHE_REDIS)
try:
    redis_client.time()
except redis.ConnectionError:
    redis_client = cache.get_client('default')


def get_model_name(model):
    """Return the lowercased 'app_label.model_name' for a model class."""
    return ('%s.%s' % (model._meta.app_label, model._meta.model_name)).lower()


def get_view_name(view):
    """Return the lowercased 'module.view_name' for a view callable."""
    return ('%s.%s' % (view.__module__, view.__name__)).lower()


def format_key(*args):
    """Join the given arguments into a colon-separated cache-key string."""
    return ':'.join([str(arg) for arg in args])


def safe_pickle_load(value, default=None):
def redis_server_reset_rooms():
    """Clear the per-room user-count namespace on the Redis server."""
    client = cache.get_client('')
    namespace = cache.make_key("NUM_USERS_")
    clear_ns(client, namespace)
def __init__(self, name):
    """Remember the given name and acquire the Redis client for DB 1."""
    self.name = name
    self.redis_client = cache.get_client(1)