def get_saved_keys(account_id: int, reload: bool = False):
    redis = get_conn()
    key = 'saved_keys:' + str(account_id)
    saved_keys = redis.get(key)
    result = None
    if saved_keys is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            args_dict = dict(account_id=str(account_id))
            query = """
                SELECT id, service, discord_channel_ids, added, dead
                FROM saved_session_keys_with_hashes
                WHERE contributor_id = %(account_id)s
                ORDER BY added DESC
            """
            cursor.execute(query, args_dict)
            result = cursor.fetchall()
            redis.set(key, serialize_dict_list(result), ex=3600)
            lock.release()
        else:
            # Another worker holds the refresh lock; back off briefly and retry.
            time.sleep(0.1)
            return get_saved_keys(account_id, reload=reload)
    else:
        result = deserialize_dict_list(saved_keys)
    return [Service_Key.from_dict(service_key) for service_key in result]
def get_saved_key_import_ids(key_id, reload=False):
    redis = get_conn()
    key = 'saved_key_import_ids:' + str(key_id)
    saved_key_import_ids = redis.get(key)
    if saved_key_import_ids is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            # TODO: select columns
            query = """
                SELECT *
                FROM saved_session_key_import_ids
                WHERE key_id = %s
            """
            cursor.execute(query, (int(key_id), ))
            saved_key_import_ids = cursor.fetchall()
            redis.set(key, serialize_dict_list(saved_key_import_ids), ex=3600)
            lock.release()
        else:
            time.sleep(0.1)
            return get_saved_key_import_ids(key_id, reload=reload)
    else:
        saved_key_import_ids = deserialize_dict_list(saved_key_import_ids)
    return saved_key_import_ids
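
# Illustrative sketch, not part of this module's API: the getters in this module
# assume serialize_dict_list / deserialize_dict_list helpers that round-trip
# cursor rows through Redis. Something along these lines would satisfy that
# contract, assuming JSON with a datetime-aware encoder; the real helpers
# imported by this module may be implemented differently.
def _example_serialize_dict_list(rows) -> str:
    import json
    from datetime import datetime

    def _default(value):
        # Assumption: datetimes are the only non-JSON-native values in the rows.
        if isinstance(value, datetime):
            return value.isoformat()
        raise TypeError(f"Unserializable value: {value!r}")

    # Dict-style cursor rows behave like mappings; coerce to plain dicts first.
    return json.dumps([dict(row) for row in rows], default=_default)


def _example_deserialize_dict_list(payload) -> list:
    import json
    return json.loads(payload)
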
def get_artists(
    pagination_db: TDPaginationDB,
    params: TDArtistsParams = default_params,
    reload: bool = False
) -> List[TDArtist]:
    """
    Get all artist information.
    @TODO return dataclass
    """
    redis = get_conn()
    service = params["service"]
    sort_by = params["sort_by"]
    # encoded_name = encode_text_query(params["name"])
    redis_key = construct_artists_key(
        *(("service", service) if service else ""),
        *("sort_by", sort_by),
        # *(("name", encoded_name) if name else ""),
        str(pagination_db["pagination_init"]["current_page"])
    )
    artists = redis.get(redis_key)
    if artists is not None and not reload:
        return deserialize_dict_list(artists)

    lock = KemonoRedisLock(redis, redis_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_artists(pagination_db, params, reload=reload)

    cursor = get_cursor()
    arg_dict = dict(
        offset=pagination_db["offset"],
        limit=pagination_db["sql_limit"],
        service=service,
        # name=name
    )
    # name_query = f"AND to_tsvector('english', name) @@ websearch_to_tsquery(%(name)s)" if name else ""
    sort_query = sort_queries[sort_by]
    query = f"""
        SELECT id, indexed, name, service, updated
        FROM lookup
        WHERE service != 'discord-channel'
            { "AND service = %(service)s" if service else "" }
        ORDER BY {sort_query}
        OFFSET %(offset)s
        LIMIT %(limit)s
    """
    cursor.execute(query, arg_dict)
    artists: List[TDArtist] = cursor.fetchall()
    redis.set(redis_key, serialize_dict_list(artists), ex=600)
    lock.release()
    return artists
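
# Illustrative sketch: construct_artists_key above is assumed to join its
# variadic parts into a namespaced Redis key (optional service / sort_by filters
# plus the page number). A hypothetical builder consistent with how it is called
# could look like this; the real key builder may use a different scheme.
def _example_construct_artists_key(*parts: str) -> str:
    # e.g. _example_construct_artists_key("service", "patreon", "sort_by", "indexed", "1")
    # -> "artists:service:patreon:sort_by:indexed:1"
    return ":".join(["artists", *parts])
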
def get_banned_artists(pagination_db: TDPaginationDB, reload: bool = False):
    redis = get_conn()
    redis_key = construct_banned_artists_key(
        str(pagination_db["pagination_init"]["current_page"]))
    banned_artists = redis.get(redis_key)
    result = None
    if banned_artists is not None and not reload:
        result = deserialize_dict_list(banned_artists)
        return result

    lock = KemonoRedisLock(redis, redis_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_banned_artists(
            pagination_db,
            reload=reload,
        )

    cursor = get_cursor()
    query_args = dict(
        offset=pagination_db["offset"],
        limit=pagination_db["sql_limit"]
    )
    query = """
        SELECT artist.id, artist.indexed, artist.name, artist.service, artist.updated
        FROM dnp as banned, lookup as artist
        WHERE banned.id = artist.id
            AND banned.service = artist.service
        OFFSET %(offset)s
        LIMIT %(limit)s
    """
    cursor.execute(query, query_args)
    result: List[TDArtist] = cursor.fetchall()
    # Cache the freshly fetched rows; the original cached the stale
    # `banned_artists` value here, which is None on a cache miss.
    redis.set(redis_key, serialize_dict_list(result), ex=600)
    lock.release()
    return result
def get_all_posts(offset: int, reload=False):
    redis = get_conn()
    key = 'all_posts:' + str(offset)
    all_posts = redis.get(key)
    if all_posts is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            query = 'SELECT * FROM posts ORDER BY added desc OFFSET %s LIMIT 25'
            cursor.execute(query, (offset, ))
            all_posts = cursor.fetchall()
            redis.set(key, serialize_dict_list(all_posts), ex=600)
            lock.release()
        else:
            time.sleep(0.1)
            return get_all_posts(offset, reload=reload)
    else:
        all_posts = deserialize_dict_list(all_posts)
    return all_posts
def get_all_posts_for_query(q: str, offset: int, reload=False):
    if q.strip() == '':
        return get_all_posts(0)

    redis = get_conn()
    key = 'all_posts_for_query:' + q + ':' + str(offset)
    results = redis.get(key)
    if results is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            # Disable index scans for this statement before running the
            # full-text search.
            query = "SET LOCAL enable_indexscan = off; "
            query += (
                "SELECT * FROM posts "
                "WHERE to_tsvector('english', content || ' ' || title) @@ websearch_to_tsquery(%s) "
                "ORDER BY added desc LIMIT 25 OFFSET %s"
            )
            params = (q, offset)
            cursor.execute(query, params)
            results = cursor.fetchall()
            redis.set(key, serialize_dict_list(results), ex=600)
            lock.release()
        else:
            time.sleep(0.1)
            return get_all_posts_for_query(q, offset, reload=reload)
    else:
        results = deserialize_dict_list(results)
    return results
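
# Illustrative sketch: every getter in this module follows the same
# check-cache / lock / query / cache / release pattern. A generic helper like
# the one below could factor that out. It is a hypothetical refactor, not
# something these functions currently use, and it assumes the same get_conn,
# get_cursor, KemonoRedisLock, serialize_dict_list and deserialize_dict_list
# helpers this module already imports.
def _example_cached_query(redis_key: str, fetch, reload: bool = False, expiry: int = 600):
    redis = get_conn()
    cached = redis.get(redis_key)
    if cached is not None and not reload:
        return deserialize_dict_list(cached)

    lock = KemonoRedisLock(redis, redis_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        # Another worker is populating the cache; back off briefly and retry.
        time.sleep(0.1)
        return _example_cached_query(redis_key, fetch, reload=reload, expiry=expiry)

    try:
        # `fetch` receives a cursor and returns the rows to cache.
        result = fetch(get_cursor())
        redis.set(redis_key, serialize_dict_list(result), ex=expiry)
    finally:
        lock.release()
    return result

# Hypothetical usage, mirroring get_all_posts:
#   def _fetch_all_posts(cursor, offset):
#       cursor.execute('SELECT * FROM posts ORDER BY added desc OFFSET %s LIMIT 25', (offset, ))
#       return cursor.fetchall()
#   all_posts = _example_cached_query('all_posts:' + str(offset),
#                                     lambda cursor: _fetch_all_posts(cursor, offset))
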