def get_artist_post_count(service, artist_id, reload=False):
    """Return the number of posts for an artist, cached in Redis for 10 minutes.

    On cache miss (or when ``reload`` is true) recomputes via COUNT(*), guarded
    by a Redis lock so only one worker hits the database at a time.
    """
    redis = get_conn()
    cache_key = 'artist_post_count:' + service + ':' + str(artist_id)
    cached = redis.get(cache_key)
    if cached is not None and not reload:
        return int(cached)

    lock = KemonoRedisLock(redis, cache_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        # Another worker is recomputing this value; back off briefly and retry.
        time.sleep(0.1)
        return get_artist_post_count(service, artist_id, reload=reload)

    cursor = get_cursor()
    query = 'SELECT count(*) as count FROM posts WHERE "user" = %s AND service = %s'
    cursor.execute(query, (artist_id, service,))
    count = cursor.fetchone()['count']
    redis.set(cache_key, str(count), ex=600)
    lock.release()
    return count
def get_artists_by_service(service, reload=False):
    """Return every lookup row for a service, cached in Redis for 10 minutes."""
    redis = get_conn()
    cache_key = 'artists_by_service:' + service
    cached = redis.get(cache_key)
    if cached is not None and not reload:
        return deserialize_artists(cached)

    lock = KemonoRedisLock(redis, cache_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        # Someone else is refreshing the cache; back off and retry.
        time.sleep(0.1)
        return get_artists_by_service(service, reload=reload)

    cursor = get_cursor()
    cursor.execute("SELECT * FROM lookup WHERE service = %s", (service,))
    artists = cursor.fetchall()
    redis.set(cache_key, serialize_artists(artists), ex=600)
    lock.release()
    return artists
def get_artist(service: str, artist_id: str, reload: bool = False) -> dict:
    """Fetch one artist row from the lookup table, cached in Redis for 10 minutes."""
    redis = get_conn()
    cache_key = 'artist:' + service + ':' + str(artist_id)
    cached = redis.get(cache_key)
    if cached is not None and not reload:
        return deserialize_artist(cached)

    lock = KemonoRedisLock(redis, cache_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        # Another worker holds the refresh lock; retry shortly.
        time.sleep(0.1)
        return get_artist(service, artist_id, reload=reload)

    cursor = get_cursor()
    cursor.execute('SELECT * FROM lookup WHERE id = %s AND service = %s', (artist_id, service,))
    artist = cursor.fetchone()
    redis.set(cache_key, serialize_artist(artist), ex=600)
    lock.release()
    return artist
def lookup():
    """Search artists by name substring (ILIKE), optionally filtered by service.

    Query params:
      q       -- required name fragment; missing q returns 400
      service -- optional exact service filter
      limit   -- page size, capped at 150 (default 50)

    Returns a JSON array of matching artist ids.
    """
    if request.args.get('q') is None:
        return make_response('Bad request', 400)
    cursor = get_cursor()
    query = "SELECT * FROM lookup "
    params = ()
    query += "WHERE name ILIKE %s "
    params += ('%' + request.args.get('q') + '%',)
    if request.args.get('service'):
        query += "AND service = %s "
        params += (request.args.get('service'),)
    # Parse with type=int so a non-numeric limit falls back to the default
    # instead of raising ValueError (which previously produced a 500).
    limit = request.args.get('limit', type=int)
    if limit is None or limit > 150:
        limit = 50
    query += "LIMIT %s"
    params += (limit,)
    cursor.execute(query, params)
    results = cursor.fetchall()
    return make_response(jsonify([row['id'] for row in results]), 200)
def get_random_artist_keys(count, reload=False):
    """Return `count` random (id, service) rows excluding discord channels; cached 10 minutes."""
    redis = get_conn()
    cache_key = 'random_artist_keys:' + str(count)
    cached = redis.get(cache_key)
    if cached is not None and not reload:
        return ujson.loads(cached)

    lock = KemonoRedisLock(redis, cache_key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_random_artist_keys(count, reload=reload)

    cursor = get_cursor()
    cursor.execute(
        "SELECT id, service FROM lookup WHERE service != 'discord-channel' ORDER BY random() LIMIT %s",
        (count,),
    )
    artist_keys = cursor.fetchall()
    redis.set(cache_key, ujson.dumps(artist_keys), ex=600)
    lock.release()
    return artist_keys
def count_accounts(queries: Dict[str, str]) -> int:
    """Count accounts whose role matches `queries['role']`, optionally filtered
    by a username substring when `queries['name']` is present.
    """
    has_name = queries.get('name') is not None
    params = {
        'role': queries['role'],
        'username': f"%%{queries['name']}%%" if has_name else None,
    }
    name_filter = 'AND username LIKE %(username)s' if has_name else ''
    query = f"""
        SELECT COUNT(*) AS total_number_of_accounts
        FROM account
        WHERE role = ANY(%(role)s)
        {name_filter}
    """
    cursor = get_cursor()
    cursor.execute(query, params)
    return cursor.fetchone()['total_number_of_accounts']
def count_account_notifications(account_id: int) -> int:
    """
    Count notification rows for an account; returns 0 on any error.

    TODO: fix `psycopg2.ProgrammingError: no results to fetch` error
    """
    try:
        cursor = get_cursor()
        query = """
            SELECT COUNT(*) AS notifications_count
            FROM notifications
            WHERE account_id = %(account_id)s
        """
        cursor.execute(query, {"account_id": account_id})
        result = cursor.fetchone()
        return result["notifications_count"]
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit. The best-effort "return 0" fallback is kept.
        return 0
def user_search(service, user):
    """Full-text search over one artist's posts (websearch syntax), newest first.

    Query params:
      q     -- search string, max 35 chars (longer returns 400; a missing q
               yields a NULL tsquery, matching no rows — preserved behavior)
      o     -- result offset (default 0)
      limit -- page size, capped at 150 (default 25)
    """
    q = request.args.get('q')
    if q and len(q) > 35:
        return make_response('Bad request', 400)
    # Parse paging params with type=int: a non-numeric `limit` previously
    # raised ValueError (500) and a non-numeric `o` reached Postgres raw.
    offset = request.args.get('o', type=int)
    if offset is None:
        offset = 0
    limit = request.args.get('limit', type=int)
    if limit is None or limit > 150:
        limit = 25
    cursor = get_cursor()
    query = (
        'SELECT * FROM posts WHERE "user" = %s AND service = %s '
        "AND to_tsvector(content || ' ' || title) @@ websearch_to_tsquery(%s) "
        'ORDER BY published desc OFFSET %s LIMIT %s'
    )
    cursor.execute(query, (user, service, q, offset, limit))
    results = cursor.fetchall()
    return jsonify(results)
def get_all_dms_by_query(text_query: str, offset: int, limit: int, reload: bool = False) -> List[Approved_DM]:
    """Full-text search over approved DMs, newest first.

    Results are cached in Redis for 10 minutes per (query, offset) pair.
    """
    # Base64-encode the search text so arbitrary characters are safe in a Redis key.
    encoded_query = base64.b64encode(text_query.encode('utf-8')).decode('utf-8')
    redis = get_conn()
    key = f'all_dms_by_query:{encoded_query}:{offset}'
    cached = redis.get(key)
    if cached and not reload:
        rows = deserialize_dms(cached)
        return [Approved_DM.from_dict(row) for row in rows] if rows else []

    lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_all_dms_by_query(text_query, offset, limit, reload=reload)

    cursor = get_cursor()
    query = """
        SELECT id, "user", service, content, embed, file, added, published
        FROM dms
        WHERE to_tsvector('english', content) @@ websearch_to_tsquery(%(text_query)s)
        ORDER BY added DESC
        OFFSET %(offset)s
        LIMIT %(limit)s
    """
    cursor.execute(query, dict(text_query=text_query, offset=offset, limit=limit))
    rows = cursor.fetchall()
    redis.set(key, serialize_dms(rows), ex=600)
    lock.release()
    return [Approved_DM.from_dict(row) for row in rows] if rows else []
def get_favorite_posts(account_id, reload=False):
    """Return the full post objects an account has favorited.

    The raw favorite list is cached in Redis with no TTL — presumably because
    add_favorite_post / remove_favorite_post refresh it explicitly with
    reload=True (verify against callers).
    """
    redis = get_conn()
    key = 'favorite_posts:' + str(account_id)
    cached = redis.get(key)
    if cached is not None and not reload:
        favorites = deserialize_dict_list(cached)
    else:
        cursor = get_cursor()
        cursor.execute(
            "select id, service, artist_id, post_id from account_post_favorite where account_id = %s",
            (account_id,),
        )
        favorites = cursor.fetchall()
        redis.set(key, serialize_dict_list(favorites))

    posts = []
    for favorite in favorites:
        post = get_post(favorite['post_id'], favorite['artist_id'], favorite['service'])
        if post is not None:
            post['faved_seq'] = favorite['id']
            posts.append(post)
    return posts
def send_notifications(account_ids: List[str], notification_type: int, extra_info: Optional[TypedDict]) -> bool:
    """Insert one notification row per account id.

    Returns False (inserting nothing) when `account_ids` is empty, True otherwise.

    Fix: the previous version interpolated the JSON-serialized `extra_info`
    (and `notification_type`) directly into the SQL text, so any embedded
    single quote broke the statement (SQL injection surface). All values are
    now passed as bound parameters.
    """
    if not account_ids:
        return False
    cursor = get_cursor()
    # Serialize once; NULL in the database when no extra info was supplied.
    serialized_info = dumps(extra_info) if extra_info is not None else None
    values_template = ",".join(["(%s, %s, %s)"] * len(account_ids))
    params = []
    for account_id in account_ids:
        params.extend((account_id, notification_type, serialized_info))
    insert_query = f"""
        INSERT INTO notifications (account_id, type, extra_info)
        VALUES {values_template}
        ;
    """
    cursor.execute(insert_query, params)
    return True
def get_unapproved_dms(import_id: str, account_id: int, reload: bool = False) -> List[Unapproved_DM]:
    """
    Return the DMs a contributor submitted under `import_id` that still await approval.

    TODO: fix `account_id` type
    """
    redis = get_conn()
    key = f'unapproved_dms:{import_id}:{str(account_id)}'
    dms = redis.get(key)
    result = None
    if dms and not reload:
        result = deserialize_dms(dms)
        # Fix: `if result` was previously written as a comprehension *filter*
        # (`for dm in result if result`), which raised TypeError when `result`
        # was None instead of falling back to an empty list.
        return [Unapproved_DM.from_dict(dm) for dm in result] if result else []
    lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_unapproved_dms(import_id, account_id, reload=reload)
    cursor = get_cursor()
    args_dict = dict(import_id=import_id, account_id=str(account_id))
    query = """
        SELECT id, import_id, contributor_id, "user", service, content, embed, added, published, file
        FROM unapproved_dms
        WHERE import_id = %(import_id)s
            AND contributor_id = %(account_id)s
    """
    cursor.execute(query, args_dict)
    result = cursor.fetchall()
    # ex=1: cached for only one second — presumably because this set changes
    # while an import is running; confirm before raising the TTL.
    redis.set(key, serialize_dms(result), ex=1)
    lock.release()
    dms = [Unapproved_DM.from_dict(dm) for dm in result] if result else []
    return dms
def approve_dm(import_id: str, dm_id: str):
    """Promote one DM from `unapproved_dms` to `dms`.

    Copies the row, then deletes the unapproved original; both statements are
    sent in a single execute() call. Always returns True.
    """
    cursor = get_cursor()
    query = """
        INSERT INTO dms (id, "user", service, content, embed, added, published, file)
        SELECT id, "user", service, content, embed, added, published, file
        FROM unapproved_dms
        WHERE import_id = %(import_id)s
            AND id = %(dm_id)s
        ;
        DELETE FROM unapproved_dms
        WHERE import_id = %(import_id)s
            AND id = %(dm_id)s
        ;
    """
    cursor.execute(query, dict(import_id=import_id, dm_id=dm_id))
    return True
def get_artists_by_update_time(offset, reload=False):
    """Return a page of 25 non-discord artists ordered by last update, cached 10 minutes.

    Fixes vs. the previous revision:
      - execute() was called with `(params,)`, wrapping the parameter tuple in
        another tuple and mismatching the single %s placeholder;
      - the lock-contention branch dropped its `return`, so contended calls
        returned None.
    """
    redis = get_conn()
    key = 'artists_by_update_time:' + str(offset)
    artists = redis.get(key)
    if artists is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            query = (
                "SELECT * FROM lookup WHERE service != 'discord-channel' "
                "ORDER BY updated desc OFFSET %s LIMIT 25"
            )
            cursor.execute(query, (offset,))
            artists = cursor.fetchall()
            redis.set(key, serialize_artists(artists), ex=600)
            lock.release()
        else:
            time.sleep(0.1)
            return get_artists_by_update_time(offset, reload=reload)
    else:
        artists = deserialize_artists(artists)
    return artists
def get_all_dms(offset: int, limit: int, reload: bool = False) -> List[Approved_DM]:
    """Return a page of approved DMs, newest first; cached in Redis for 10 minutes per offset."""
    redis = get_conn()
    key = f'all_dms:{offset}'
    cached = redis.get(key)
    if cached and not reload:
        rows = deserialize_dms(cached)
        return [Approved_DM.from_dict(row) for row in rows] if rows else []

    lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_all_dms(offset, limit, reload=reload)

    cursor = get_cursor()
    query = """
        SELECT id, "user", service, content, embed, file, added, published
        FROM dms
        ORDER BY added DESC
        OFFSET %(offset)s
        LIMIT %(limit)s
    """
    cursor.execute(query, dict(offset=offset, limit=limit))
    rows = cursor.fetchall()
    redis.set(key, serialize_dms(rows), ex=600)
    lock.release()
    return [Approved_DM.from_dict(row) for row in rows] if rows else []
def is_post_flagged(service, artist_id, post_id, reload=False):
    """Return True if the post has a booru flag; cached in Redis for 10 minutes."""
    redis = get_conn()
    key = 'is_post_flagged:' + service + ':' + str(artist_id) + ':' + str(post_id)
    cached = redis.get(key)
    if cached is not None and not reload:
        # Cached value is the string repr of a bool ("True"/"False").
        return cached.decode('utf-8') == 'True'

    lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return is_post_flagged(service, artist_id, post_id, reload=reload)

    cursor = get_cursor()
    cursor.execute(
        'SELECT * FROM booru_flags WHERE id = %s AND "user" = %s AND service = %s',
        (post_id, artist_id, service),
    )
    flagged = cursor.fetchone() is not None
    redis.set(key, str(flagged), ex=600)
    lock.release()
    return flagged
def get_artist_dms(service: str, artist_id: int, reload: bool = False) -> List[Approved_DM]:
    """Return all approved DMs for one artist; cached in Redis for 10 minutes."""
    redis = get_conn()
    key = f'dms:{service}:{artist_id}'
    cached = redis.get(key)
    if cached and not reload:
        rows = deserialize_dms(cached)
        return [Approved_DM.from_dict(row) for row in rows] if rows else []

    lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
    if not lock.acquire(blocking=False):
        time.sleep(0.1)
        return get_artist_dms(service, artist_id, reload=reload)

    cursor = get_cursor()
    query = """
        SELECT id, "user", service, content, embed, file, added, published
        FROM dms
        WHERE service = %(service)s
            AND "user" = %(artist_id)s
    """
    cursor.execute(query, dict(service=service, artist_id=artist_id))
    rows = cursor.fetchall()
    redis.set(key, serialize_dms(rows), ex=600)
    lock.release()
    return [Approved_DM.from_dict(row) for row in rows] if rows else []
def get_artist_last_updated(service, artist_id, reload=False):
    """Return the timestamp of the artist's most recent post, cached 10 minutes.

    Returns `datetime.min` when the artist has no posts.

    Fix: the lock-contention branch previously dropped its `return`, so
    contended calls returned None instead of the recursively fetched value.
    """
    redis = get_conn()
    key = 'artist_last_updated:' + service + ':' + str(artist_id)
    last_updated = redis.get(key)
    if last_updated is None or reload:
        lock = KemonoRedisLock(redis, key, expire=60, auto_renewal=True)
        if lock.acquire(blocking=False):
            cursor = get_cursor()
            query = 'SELECT max(added) as max FROM posts WHERE service = %s AND "user" = %s'
            cursor.execute(query, (service, artist_id,))
            last_updated = cursor.fetchone()
            if get_value(last_updated, 'max') is not None:
                last_updated = last_updated['max']
            else:
                # No posts yet: cache a sentinel instead of None.
                last_updated = datetime.min
            redis.set(key, last_updated.isoformat(), ex=600)
            lock.release()
        else:
            time.sleep(0.1)
            return get_artist_last_updated(service, artist_id, reload=reload)
    else:
        last_updated = dateutil.parser.parse(last_updated)
    return last_updated
def change_account_role(
    account_ids: List[str],
    extra_info: ACCOUNT_ROLE_CHANGE
):
    """Set a new role on every listed account, then notify those accounts.

    `extra_info["new_role"]` is the role to assign; the same payload is passed
    along to send_notifications. Always returns True.
    """
    cursor = get_cursor()
    query = """
        UPDATE account
        SET role = %(new_role)s
        WHERE id = ANY (%(account_ids)s)
    """
    cursor.execute(query, dict(account_ids=account_ids, new_role=extra_info["new_role"]))
    send_notifications(
        account_ids,
        Notification_Types.ACCOUNT_ROLE_CHANGE,
        extra_info
    )
    return True
def upload():
    """Handle one resumable.js chunk upload.

    Each call stores a single chunk. When the final chunk arrives the file is
    assembled, forwarded to the archiver service, and a shared-file post row
    is inserted. Returns a JSON status payload either way.

    Fixes vs. the previous revision: the bare `except:` around cleanup is
    narrowed to `except Exception`, and the assembled file handle passed to
    requests.post is now closed via a `with` block (it previously leaked).
    """
    resumable_dict = {
        'resumableIdentifier': request.form.get('resumableIdentifier'),
        'resumableFilename': request.form.get('resumableFilename'),
        'resumableTotalSize': request.form.get('resumableTotalSize'),
        'resumableTotalChunks': request.form.get('resumableTotalChunks'),
        'resumableChunkNumber': request.form.get('resumableChunkNumber')
    }
    if int(request.form.get('resumableTotalSize')) > int(getenv('UPLOAD_LIMIT')):
        return "File too large.", 415

    makedirs('/tmp/uploads', exist_ok=True)
    makedirs('/tmp/uploads/incomplete', exist_ok=True)

    resumable = UploaderFlask(
        resumable_dict,
        '/tmp/uploads',
        '/tmp/uploads/incomplete',
        request.files['file']
    )
    resumable.upload_chunk()

    if resumable.check_status() is True:
        resumable.assemble_chunks()
        try:
            resumable.cleanup()
        except Exception:
            # Chunk cleanup is best-effort; failure must not block the upload.
            pass

        try:
            host = getenv('ARCHIVERHOST')
            port = getenv('ARCHIVERPORT') if getenv('ARCHIVERPORT') else '8000'
            with open(join('/tmp/uploads', request.form.get('resumableFilename')), 'rb') as assembled:
                r = requests.post(
                    f'http://{host}:{port}/api/upload/uploads',
                    files={'file': assembled}
                )
            final_path = r.text
            r.raise_for_status()
        except Exception:
            return 'Error while connecting to archiver.', 500

        post_model = {
            'id': ''.join(random.choice(string.ascii_letters) for x in range(8)),
            '"user"': request.form.get('user'),
            'service': request.form.get('service'),
            'title': request.form.get('title'),
            'content': request.form.get('content') or "",
            'embed': {},
            'shared_file': True,
            'added': datetime.now(),
            'published': datetime.now(),
            'edited': None,
            'file': {
                "name": basename(final_path),
                "path": final_path
            },
            'attachments': []
        }
        post_model['embed'] = json.dumps(post_model['embed'])
        post_model['file'] = json.dumps(post_model['file'])

        columns = post_model.keys()
        data = ['%s'] * len(post_model.values())
        data[-1] = '%s::jsonb[]'  # attachments
        query = "INSERT INTO posts ({fields}) VALUES ({values})".format(
            fields=','.join(columns),
            values=','.join(data)
        )
        cursor = get_cursor()
        cursor.execute(query, list(post_model.values()))

        return jsonify({
            "fileUploadStatus": True,
            "resumableIdentifier": resumable.repo.file_id
        })

    return jsonify({
        "chunkUploadStatus": True,
        "resumableIdentifier": resumable.repo.file_id
    })
def get_login_info_for_username(username):
    """Return the (id, password_hash) row for a username, or None if no such account."""
    cursor = get_cursor()
    cursor.execute('SELECT id, password_hash FROM account WHERE username = %s', (username,))
    return cursor.fetchone()
def is_username_taken(username):
    """Return True if an account with this exact username already exists."""
    cursor = get_cursor()
    cursor.execute('SELECT id FROM account WHERE username = %s', (username,))
    row = cursor.fetchone()
    return row is not None
import signal import requests from flask import Flask, request from flask_restful import Resource, Api from json import dumps from flask.ext.jsonpify import jsonify import src.database as database from src.credentials import * import subprocess CONN = database.db_connect(USER_DATABASE_URL) DB_ACCESS = {'conn': CONN, 'cur': CONN.cursor(), 'url': USER_DATABASE_URL} CURSOR = database.get_cursor(DB_ACCESS) # demo import src.register as register app = Flask(__name__) api = Api(app) global_process_map = {} class Main(Resource): def get(self): if request.args['mode'] == 'on': #bot.on rate = request.args['rate'] if request.args['fav'].lower() == 'true':
def add_favorite_post(account_id, service, artist_id, post_id):
    """Favorite a post for an account (idempotent via ON CONFLICT), then
    force-refresh the caches that depend on the favorite list.
    """
    cursor = get_cursor()
    query = 'insert into account_post_favorite (account_id, service, artist_id, post_id) values (%s, %s, %s, %s) ON CONFLICT (account_id, service, artist_id, post_id) DO NOTHING'
    cursor.execute(query, (account_id, service, artist_id, post_id))
    # reload=True rebuilds the per-account favorite caches.
    get_favorite_posts(account_id, True)
    is_post_favorited(account_id, service, artist_id, post_id, True)
def remove_favorite_post(account_id, service, artist_id, post_id):
    """Unfavorite a post for an account, then force-refresh the dependent caches."""
    cursor = get_cursor()
    query = 'delete from account_post_favorite where account_id = %s and service = %s and artist_id = %s and post_id = %s'
    cursor.execute(query, (account_id, service, artist_id, post_id))
    # reload=True rebuilds the per-account favorite caches.
    get_favorite_posts(account_id, True)
    is_post_favorited(account_id, service, artist_id, post_id, True)