def renameFolder(user, path, new_name):
    _verifyPath(path)
    _verifyFolderName(new_name)
    if path == "/":
        raise UserError('INVALID_PATH')
    with redis_lock.Lock(rdb, f"folderEdit:{str(makeUserMeta(user))}:{path}"), MongoTransaction(client) as s:
        folder_obj = _findFolder(user, path)
        filterOperation('renameFolder', user, folder_obj)
        parent_path, cur_folder = _parentPath(path)
        if '\\' in cur_folder:
            raise UserError('INVALID_PATH')
        if db.playlist_folders.find_one({'user': makeUserMeta(user), 'path': parent_path + new_name + '/'}):
            raise UserError('FOLDER_ALREADY_EXIST')
        parent_path_escaped = re.escape(parent_path)
        cur_folder_escaped = re.escape(cur_folder)
        query_regex = f'^{parent_path_escaped}{cur_folder_escaped}\\/.*'
        replace_regex = re.compile(f'^({parent_path_escaped})({cur_folder_escaped})(\\/.*)')
        paths = db.playlist_folders.find({'user': makeUserMeta(user), 'path': {'$regex': query_regex}}, session=s())
        # rename the folder document itself (path ends exactly at this folder)
        db.playlist_folders.update_one(
            {'user': makeUserMeta(user), 'path': {'$regex': f'^{parent_path_escaped}{cur_folder_escaped}\\/$'}},
            {'$set': {'name': new_name}},
            session=s())
        # rewrite the stored path of every descendant folder/leaf
        for p in paths:
            new_path = replace_regex.sub(rf'\g<1>{new_name}\g<3>', p['path'])
            db.playlist_folders.update_one({'_id': p['_id']}, {'$set': {'path': new_path}}, session=s())
        db.playlist_folders.update_one(
            {'user': makeUserMeta(user), 'path': {'$regex': query_regex}},
            {'$set': {'meta.modified_by': makeUserMeta(user), 'meta.modified_at': datetime.now()}},
            session=s())
        s.mark_succeed()
        return parent_path + new_name + '/'
async def _add_to_playlist(self, dst_playlist, event_id, user_global):
    if self.playlist_map[dst_playlist]:
        dst_rank = self.playlist_map[dst_playlist]['rank']
        playlist_ordered = self.playlist_map[dst_playlist]['all']
        try:
            # fast method: add all videos inside a single transaction
            async with RedisLockAsync(rdb, "playlistEdit:" + dst_playlist), MongoTransaction(client) as s:
                cur_rank = 0
                playlist = db.playlists.find_one({'_id': ObjectId(dst_playlist)})
                if playlist is None:
                    raise UserError('PLAYLIST_NOT_EXIST')
                if playlist["videos"] + len(self.playlist_map[dst_playlist]['succeed']) > PlaylistConfig.MAX_VIDEO_PER_PLAYLIST:
                    raise UserError('VIDEO_LIMIT_EXCEEDED')
                playlist_videos = playlist['videos']
                for unique_id in playlist_ordered:
                    if unique_id in self.playlist_map[dst_playlist]['succeed']:
                        (video_id, _, user) = self.playlist_map[dst_playlist]['succeed'][unique_id]
                        if dst_rank == -1:
                            # append to the end of the playlist
                            if filterOperation('editPlaylist', user, playlist, False):
                                if addVideoToPlaylistLockFree(dst_playlist, video_id, user, playlist_videos, session=s()):
                                    playlist_videos += 1
                        else:
                            # insert at the requested position
                            if filterOperation('editPlaylist', user, playlist, False):
                                if insertIntoPlaylistLockFree(dst_playlist, video_id, dst_rank + cur_rank, user, session=s()):
                                    cur_rank += 1
                s.mark_succeed()
        except UserError as ue:
            # UserError: re-raise to the upper level
            log_e(event_id, user_global, '_add_to_playlist', 'ERR', {'ex': str(ue), 'tb': traceback.format_exc()})
            del self.playlist_map[dst_playlist]
            rdb.set(f'playlist-batch-post-event-{dst_playlist}', b'done')
            raise ue
        except Exception as ex:
            # if anything goes wrong, fall back to the slow method (one video at a time)
            log_e(event_id, user_global, '_add_to_playlist', 'ERR', {'ex': str(ex), 'tb': traceback.format_exc()})
            cur_rank = 0
            for unique_id in playlist_ordered:
                if unique_id in self.playlist_map[dst_playlist]['succeed']:
                    (video_id, _, user) = self.playlist_map[dst_playlist]['succeed'][unique_id]
                    # ignore errors, continue with the next video
                    try:
                        if dst_rank == -1:
                            addVideoToPlaylist(dst_playlist, video_id, user)
                        else:
                            insertIntoPlaylist(dst_playlist, video_id, dst_rank + cur_rank, user)
                        cur_rank += 1
                    except Exception:
                        pass
        log_e(event_id, user_global, '_add_to_playlist', 'MSG', {
            'succeed': len(self.playlist_map[dst_playlist]['succeed']),
            'all': len(self.playlist_map[dst_playlist]['all']),
            'pid': dst_playlist
        })
    del self.playlist_map[dst_playlist]
    rdb.set(f'playlist-batch-post-event-{dst_playlist}', b'done')
def listAllPlaylistVideosUnordered(pid):
    playlist = db.playlists.find_one({'_id': ObjectId(pid)})
    if playlist is None:
        raise UserError('PLAYLIST_NOT_EXIST')
    ans_obj = db.playlist_items.find({"pid": ObjectId(pid)})
    return [ObjectId(item['vid']) for item in ans_obj], playlist['videos']
def _verifyPath(path):
    if path:
        if path[0] == '/' and path[-1] == '/':
            return True
    raise UserError('INVALID_PATH')
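# Illustrative sketch (not part of the module): which strings _verifyPath accepts.
# A valid folder path is non-empty and both begins and ends with '/'.
_verifyPath('/')               # root folder: ok
_verifyPath('/music/jpop/')    # nested folder: ok
try:
    _verifyPath('music/')      # missing leading '/': rejected
except UserError:
    pass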
def addReply(user, reply_to: ObjectId, text: str):  # user can add comments
    """
    reply_to: comment id
    """
    filterOperation('postComment', user)
    # TODO: notify the user being replied to
    l = len(text)
    if l > Comments.MAX_COMMENT_LENGTH_LONG:
        raise UserError('COMMENT_TOO_LONG')
    elif l > Comments.MAX_COMMENT_LENGTH_REGULAR and not filterOperation('postLongComment', user, raise_exception=False):
        raise UserError('COMMENT_TOO_LONG')
    parent_obj = db.comment_items.find_one({'_id': reply_to})
    if parent_obj is None:
        raise UserError('PARENT_NOT_EXIST')
    with MongoTransaction(client) as s:
        if 'thread' in parent_obj:
            # reply to a primary comment
            cid = str(db.comment_items.insert_one({
                'parent': reply_to,
                'content': text,
                'hidden': False,
                'deleted': False,
                'upvotes': 0,
                'downvotes': 0,
                'meta': makeUserMetaObject(user)
            }, session=s()).inserted_id)
            thread_obj = db.comment_threads.find_one({'_id': parent_obj['thread']}, session=s())
            if thread_obj is None:
                log(level='ERR', obj={'msg': 'orphan comment found!!', 'cid': parent_obj['_id'], 'obj': parent_obj})
                raise UserError('UNKNOWN_ERROR')
            thread_id = thread_obj['_id']
        else:
            # reply to a secondary comment
            cid = str(db.comment_items.insert_one({
                'parent': parent_obj['parent'],
                'reply_to': reply_to,
                'content': text,
                'hidden': False,
                'deleted': False,
                'upvotes': 0,
                'downvotes': 0,
                'meta': makeUserMetaObject(user)
            }, session=s()).inserted_id)
            parent_parent_obj = db.comment_items.find_one({'_id': parent_obj['parent']}, session=s())
            if parent_parent_obj is None:
                log(level='ERR', obj={'msg': 'orphan comment found!!', 'cid': parent_obj['_id'], 'obj': parent_obj})
                raise UserError('UNKNOWN_ERROR')
            thread_obj = db.comment_threads.find_one({'_id': parent_parent_obj['thread']}, session=s())
            if thread_obj is None:
                log(level='ERR', obj={'msg': 'orphan comment found!!', 'cid': parent_parent_obj['_id'], 'obj': parent_parent_obj})
                raise UserError('UNKNOWN_ERROR')
            thread_id = thread_obj['_id']
        note_obj = {
            "cid": ObjectId(cid),
            "replied_by": makeUserMeta(user),
            "content": text[:Comments.NOTIFICATION_CONTENT_LENGTH]
        }
        # ===========================================================
        # resolve which object (video, playlist or user) owns this thread,
        # caching the result on the thread document for next time
        if 'obj_type' in thread_obj and 'obj_id' in thread_obj:
            note_obj['replied_type'] = thread_obj['obj_type']
            note_obj['replied_obj'] = thread_obj['obj_id']
        else:
            obj = db.items.find_one({'comment_thread': thread_id}, session=s())
            if obj:
                note_obj['replied_type'] = 'video'
                note_obj['replied_obj'] = obj['_id']
                db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'video', 'obj_id': obj['_id']}}, session=s())
            else:
                obj = db.playlists.find_one({'comment_thread': thread_id}, session=s())
                if obj:
                    note_obj['replied_type'] = 'playlist'
                    note_obj['replied_obj'] = obj['_id']
                    db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'playlist', 'obj_id': obj['_id']}}, session=s())
                else:
                    obj = db.users.find_one({'comment_thread': thread_id}, session=s())
                    if obj:
                        note_obj['replied_type'] = 'user'
                        note_obj['replied_obj'] = obj['_id']
                        db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'user', 'obj_id': obj['_id']}}, session=s())
                    else:
                        log(level='ERR', obj={'msg': 'orphan thread found!!', 'thread_id': thread_id, 'thread_obj': thread_obj})
                        raise UserError('UNKNOWN_ERROR')
        # ===========================================================
        createNotification('comment_reply', parent_obj['meta']['created_by'], session=s(), other=note_obj)
        s.mark_succeed()
def _check_category(self, category, session):
    cat = self.db.cats.find_one({'name': category}, session=session)
    if cat is None:
        raise UserError('CATEGORY_NOT_EXIST')
    return cat
def listVideoQuery(user, query_str, page_idx, page_size, order='latest', user_language='CHS', hide_placeholder=True, qtype='tag'):
    log(obj={'q': query_str, 'page': page_idx, 'page_size': page_size, 'order': order, 'lang': user_language})
    if order not in ['latest', 'oldest', 'video_latest', 'video_oldest']:
        raise UserError('INCORRECT_ORDER')
    query_obj, tags = db.compile_query(query_str, qtype)
    log(obj={'query': dumps(query_obj)})
    default_blacklist_tagids = [int(i) for i in Config.DEFAULT_BLACKLIST.split(',')]
    # apply the user's tag blacklist (or the site default for anonymous users)
    if user and 'settings' in user:
        if user['settings']['blacklist'] == 'default':
            query_obj = {'$and': [query_obj, {'tags': {'$nin': default_blacklist_tagids}}]}
        else:
            query_obj = {'$and': [query_obj, {'tags': {'$nin': user['settings']['blacklist']}}]}
    elif user is None:
        query_obj = {'$and': [query_obj, {'tags': {'$nin': default_blacklist_tagids}}]}
    updateTagSearch(tags)
    try:
        result = db.retrive_items(query_obj)
        if order == 'latest':
            result = result.sort([("meta.created_at", -1)])
        if order == 'oldest':
            result = result.sort([("meta.created_at", 1)])
        if order == 'video_latest':
            result = result.sort([("item.upload_time", -1)])
        if order == 'video_oldest':
            result = result.sort([("item.upload_time", 1)])
        ret = result.skip(page_idx * page_size).limit(page_size)
        count = ret.count()
        videos = [item for item in ret]
        videos = filterVideoList(videos, user)
        if hide_placeholder:
            videos = _filterPlaceholder(videos)
    except pymongo.errors.OperationFailure as ex:
        if '$not' in str(ex):
            raise UserError('FAILED_NOT_OP')
        else:
            log(level='ERR', obj={'ex': str(ex)})
            raise UserError('FAILED_UNKNOWN')
    return videos, getCommonTags(user_language, videos), count
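# Illustrative sketch: the shape of the final Mongo query after the default blacklist
# is applied for an anonymous user (the tag ids below are made up for illustration;
# real ones come from db.compile_query and Config.DEFAULT_BLACKLIST).
compiled_query = {'tags': {'$all': [101, 202]}}
default_blacklist_tagids = [1, 2, 3]
query_obj = {'$and': [compiled_query, {'tags': {'$nin': default_blacklist_tagids}}]}
# i.e. "matches the search AND carries none of the blacklisted tags"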
def getUserDeniedOps(user_id, user):
    filterOperation('getUserDeniedOps', user, user_id)
    old_user_obj = db.users.find_one({'_id': ObjectId(user_id)})
    if old_user_obj is None:
        raise UserError('USER_NOT_EXIST')
    return old_user_obj['access_control']['denied_ops']
def parse_search(txt):
    # if len(txt) <= 2:
    #     return {}
    words = cut_for_search(txt)
    if not words:
        raise UserError('NO_MATCH_FOUND')
    found_word_objs = list(db.index_words.find({'word': {'$in': words}}))
    words_map = {w: False for w in words}
    for found_word_obj in found_word_objs:
        words_map[found_word_obj['word']] = True
    # words not present in the index fall through to prefix matching
    words_not_found = []
    for k, v in words_map.items():
        if not v:
            words_not_found.append(k)
    prefix_word_query_objs, prefix_type = _prefix_fallthrough(words_not_found)
    # exact word matches are encoded as tag ids with the high bit set
    direct_word_ids = [int(i['_id']) | 0x80000000 for i in found_word_objs]
    if prefix_type != 'empty':
        if direct_word_ids:
            if prefix_type == 'any-tags':
                return {'tags': {'$all': direct_word_ids, '$in': prefix_word_query_objs}}, 'complex-query'
            elif prefix_type == 'all-tags':
                merged = direct_word_ids + prefix_word_query_objs
                if len(merged) == 1:
                    return {'tags': merged[0]}, 'single-tag'
                else:
                    return {'tags': {'$all': merged}}, 'all-tags'
            else:
                return {'$and': [{'tags': {'$all': direct_word_ids}}, prefix_word_query_objs]}, 'complex-query'
        else:
            if prefix_type == 'any-tags':
                if len(prefix_word_query_objs) == 1:
                    return {'tags': prefix_word_query_objs[0]}, 'single-tag'
                else:
                    return {'tags': {'$in': prefix_word_query_objs}}, 'any-tags'
            elif prefix_type == 'all-tags':
                if len(prefix_word_query_objs) == 1:
                    return {'tags': prefix_word_query_objs[0]}, 'single-tag'
                else:
                    return {'tags': {'$all': prefix_word_query_objs}}, 'all-tags'
            else:
                return prefix_word_query_objs, 'complex-query'
    else:
        if len(direct_word_ids) == 1:
            return {'tags': direct_word_ids[0]}, 'single-tag'
        else:
            return {'tags': {'$all': direct_word_ids}}, 'all-tags'
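# Illustrative sketch of a few (query, query_type) pairs parse_search can return.
# The integer ids are made up; real exact-word ids come from db.index_words with
# the 0x80000000 bit OR'd in, and prefix ids come from _prefix_fallthrough.
({'tags': 0x80000065}, 'single-tag')                        # one exact word
({'tags': {'$all': [0x80000065, 0x800000c8]}}, 'all-tags')  # several exact words
({'tags': {'$in': [12, 34]}}, 'any-tags')                   # prefix fallthrough only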
def signup(username, password, email, challenge, signup_session_id):
    log(obj={'username': username, 'email': email, 'challenge': challenge, 'signup_session_id': signup_session_id})
    if len(username) > UserConfig.MAX_USERNAME_LENGTH:
        raise UserError('USERNAME_TOO_LONG')
    if len(username) < UserConfig.MIN_USERNAME_LENGTH:
        raise UserError('USERNAME_TOO_SHORT')
    if len(password) > UserConfig.MAX_PASSWORD_LENGTH:
        raise UserError('PASSWORD_TOO_LONG')
    if len(password) < UserConfig.MIN_PASSWORD_LENGTH:
        raise UserError('PASSWORD_TOO_SHORT')
    if verify_session(signup_session_id, 'SIGNUP'):
        if email:
            if len(email) > UserConfig.MAX_EMAIL_LENGTH or not re.match(r"[^@]+@[^@]+\.[^@]+", email):
                raise UserError('INCORRECT_EMAIL')
        crypto_method, password_hashed, salt1, salt2, master_key_encryptyed = generate_user_crypto_PBKDF2(password)
        with redis_lock.Lock(rdb, 'signup:' + username):
            user_obj_find = db.users.find_one({'profile.username': username})
            if user_obj_find is not None:
                raise UserError('USER_EXIST')
            if email:
                user_obj_email = db.users.find_one({'profile.email': email})
                if user_obj_email is not None:
                    raise UserError('EMAIL_EXIST')
            user_obj = {
                'profile': {
                    'username': username,
                    'desc': 'Write something here',
                    'pubkey': '',
                    'image': 'default',
                    'email': email
                },
                'crypto': {
                    'crypto_method': crypto_method,
                    'password_hashed': password_hashed,
                    'salt1': salt1,
                    'salt2': salt2,
                    'master_key_encryptyed': master_key_encryptyed
                },
                'access_control': {
                    'status': 'normal',
                    'access_mode': 'blacklist',
                    'allowed_ops': [],
                    'denied_ops': []
                },
                'settings': {
                    'blacklist': 'default'
                },
                'meta': {
                    'created_at': datetime.now()
                }
            }
            uid = db.users.insert_one(user_obj).inserted_id
            log(obj={'uid': uid, 'profile': user_obj['profile']})
            return uid
    raise UserError('INCORRECT_SESSION')
def login(username, password, challenge, login_session_id):
    log(obj={'username': username, 'challenge': challenge, 'login_session_id': login_session_id})
    if len(username) > UserConfig.MAX_USERNAME_LENGTH:
        raise UserError('USERNAME_TOO_LONG')
    if len(username) < UserConfig.MIN_USERNAME_LENGTH:
        raise UserError('USERNAME_TOO_SHORT')
    if len(password) > UserConfig.MAX_PASSWORD_LENGTH:
        raise UserError('PASSWORD_TOO_LONG')
    if len(password) < UserConfig.MIN_PASSWORD_LENGTH:
        raise UserError('PASSWORD_TOO_SHORT')
    if verify_session(login_session_id, 'LOGIN'):
        user_obj = db.users.find_one({'profile.username': username})
        if not user_obj:
            log(level='SEC', obj={'msg': 'USER_NOT_EXIST'})
            raise UserError('INCORRECT_LOGIN')
        if not verify_password_PBKDF2(password, user_obj['crypto']['salt1'], user_obj['crypto']['password_hashed']):
            log(level='SEC', obj={'msg': 'WRONG_PASSWORD'})
            raise UserError('INCORRECT_LOGIN')
        user_id = str(user_obj['_id'])
        redis_user_key_lookup_key = f"user-{user_id}"
        redis_user_key = rdb.get(redis_user_key_lookup_key)
        logged_in = False
        if redis_user_key:
            # user already logged in on some other machine
            redis_user_obj_json_str = rdb.get(redis_user_key)
            if redis_user_obj_json_str:
                logged_in = True
                # reset expire time
                rdb.set(redis_user_key, redis_user_obj_json_str, ex=UserConfig.LOGIN_EXPIRE_TIME)
                rdb.set(redis_user_key_lookup_key, redis_user_key, ex=UserConfig.LOGIN_EXPIRE_TIME)
        if logged_in:
            return redis_user_key, user_obj['profile']
        common_user_obj = {
            '_id': user_obj['_id'],
            'profile': {
                'username': user_obj['profile']['username'],
                'image': user_obj['profile']['image'],
                'desc': user_obj['profile']['desc'],
                'email': user_obj['profile']['email']
            },
            'access_control': user_obj['access_control'],
            'settings': user_obj['settings']
        }
        redis_user_value = dumps(common_user_obj)
        redis_user_key = binascii.hexlify(bytearray(random_bytes(16))).decode()
        redis_user_key_lookup_key = f"user-{user_obj['_id']}"
        rdb.set(redis_user_key, redis_user_value, ex=UserConfig.LOGIN_EXPIRE_TIME)
        rdb.set(redis_user_key_lookup_key, redis_user_key, ex=UserConfig.LOGIN_EXPIRE_TIME)
        log(obj={'redis_user_key': redis_user_key, 'user': common_user_obj})
        return redis_user_key, common_user_obj['profile']
    raise UserError('INCORRECT_SESSION')
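# Illustrative sketch (hypothetical helper, not part of this module): how a later
# request could turn the token returned by login() back into the cached user object.
# Assumes the same `rdb` connection and a `loads` counterpart to the `dumps` used above.
def resolve_session_sketch(redis_user_key):
    raw = rdb.get(redis_user_key)   # token -> serialized common_user_obj, or None
    if raw is None:
        return None                 # expired or never issued
    return loads(raw)               # same shape as the dict stored by login()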
def listPlaylistsForVideo(user, vid):
    video = tagdb.retrive_item({'_id': ObjectId(vid)})
    if video is None:
        raise UserError('VIDEO_NOT_EXIST')
    if isObjectAgnosticOperationPermitted('viewPrivatePlaylist', user):
        auth_obj = {}
    else:
        auth_obj = {
            '$or': [
                {'playlist.meta.created_by': user['_id'] if user else ''},
                {'playlist.private': False, 'playlist.videos': {'$gt': 1}}
            ]
        }
    result = db.playlist_items.aggregate([
        {'$match': {'$and': [{'pid': {'$in': video['item']['series']}}, {'vid': video['_id']}]}},
        {'$lookup': {'from': 'playlists', 'localField': 'pid', 'foreignField': '_id', 'as': 'playlist'}},
        {'$unwind': {'path': '$playlist'}},
        {'$match': auth_obj}
    ])
    ans = []
    for obj in result:
        playlist_obj = obj['playlist']
        playlist_obj['prev'] = ''
        playlist_obj['next'] = ''
        rank = obj['rank']
        if rank > 0:
            playlist_obj['prev'] = str(db.playlist_items.find_one({'pid': playlist_obj['_id'], 'rank': int(rank - 1)})['vid'])
        if rank + 1 < playlist_obj['videos']:
            playlist_obj['next'] = str(db.playlist_items.find_one({'pid': playlist_obj['_id'], 'rank': int(rank + 1)})['vid'])
        ans.append(playlist_obj)
    return ans
def addVideoToPlaylist(pid, vid, user):
    log(obj={'pid': pid, 'vid': vid})
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s:
        playlist = db.playlists.find_one({'_id': ObjectId(pid)}, session=s())
        if playlist is None:
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('addVideoToPlaylist', user, playlist)
        if tagdb.retrive_item({'_id': ObjectId(vid)}, session=s()) is None:
            raise UserError('VIDEO_NOT_EXIST')
        if playlist["videos"] > PlaylistConfig.MAX_VIDEO_PER_PLAYLIST:
            raise UserError('VIDEO_LIMIT_EXCEEDED')
        conflicting_item = db.playlist_items.find_one({'pid': ObjectId(pid), 'vid': ObjectId(vid)}, session=s())
        if conflicting_item is not None:
            # video already in playlist: move it to the end instead of adding a duplicate
            editPlaylist_MoveLockFree(pid, conflicting_item, int(playlist["videos"]), session=s())
            db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
                'meta.modified_by': makeUserMeta(user),
                'meta.modified_at': datetime.now()
            }}, session=s())
            s.mark_succeed()
            return
        # record the playlist on the video item
        playlists = tagdb.retrive_item({'_id': ObjectId(vid)}, session=s())['item']['series']
        playlists.append(ObjectId(pid))
        playlists = list(set(playlists))
        tagdb.update_item_query(ObjectId(vid), {'$set': {'item.series': playlists}}, makeUserMeta(user), session=s())
        # append the video at the current tail rank and bump the playlist's video count
        db.playlist_items.insert_one({
            "pid": ObjectId(pid),
            "vid": ObjectId(vid),
            "rank": int(playlist["videos"]),
            "meta": makeUserMeta(user)
        }, session=s())
        db.playlists.update_one({"_id": ObjectId(pid)}, {"$inc": {"videos": int(1)}}, session=s())
        db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()
        }}, session=s())
        s.mark_succeed()
def listFolder(viewing_user, user, path):
    if user == 'me':
        raise UserError('INCORRECT_USER')
    _verifyPath(path)
    folder_obj = _findFolder(user, path)
    if isinstance(user, dict):
        user = ObjectId(user['_id'])
    elif isinstance(user, str):
        user = ObjectId(user)
    if folder_obj['privateView']:
        filterOperation('listFolder', viewing_user, folder_obj)
    path_escaped = re.escape(path)
    query_regex = f'^{path_escaped}[^\\/]*\\/$'
    ret = db.playlist_folders.aggregate([
        {'$match': {'user': user, 'path': {'$regex': query_regex}}},
        {'$lookup': {'from': 'playlists', 'localField': 'playlist', 'foreignField': '_id', 'as': 'playlist_object'}},
        {'$unwind': {'path': '$playlist_object', 'preserveNullAndEmptyArrays': True}},
        {'$sort': {'path': 1}}
    ])
    items = [i for i in ret]
    ans = []
    for item in items:
        assert not (('playlist_object' in item) ^ item['leaf'])
        if item['privateView'] and (viewing_user is None or (str(user) != str(viewing_user['_id']) and viewing_user['access_control']['status'] != 'admin')):
            continue
        if 'playlist_object' in item:
            # playlist item (leaf)
            if item['playlist_object'] is None:
                # leaf playlist does not exist, do not display
                # TODO: maybe just display that it is gone instead of deleting it
                with MongoTransaction(client) as s:
                    db.playlist_folders.delete_one({'user': user, 'path': item['path']}, session=s())
                    s.mark_succeed()
            elif item['playlist_object']['private']:
                # playlist is private
                if viewing_user is not None and filterOperation('viewPrivatePlaylist', viewing_user, item['playlist_object'], raise_exception=False):
                    ans.append(item)
            else:
                ans.append(item)
        else:
            # subfolder item
            ans.append(item)
    return {'cur': folder_obj, 'children': ans}
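# Illustrative sketch: the "direct children only" regex that listFolder builds.
# The folder path '/music/' is a made-up example.
import re
path = '/music/'
pattern = f'^{re.escape(path)}[^\\/]*\\/$'
assert re.match(pattern, '/music/jpop/') is not None       # direct child: listed
assert re.match(pattern, '/music/jpop/2020/') is None      # grandchild: skipped
assert re.match(pattern, '/music/') is None                # the folder itself: skipped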
def update_tag_group(self, group_name, new_tags, user='', session=None):
    g_obj = self.db.groups.find_one({'name': group_name}, session=session)
    if g_obj is None:
        raise UserError('GROUP_NOT_EXIST')
    self.db.groups.update_one({'name': group_name}, {'$set': {
        'tags': new_tags,
        'meta.modified_by': user,
        'meta.modified_at': datetime.now()
    }}, session=session)
async def run_async(self, content, xpath, link, update_video_detail):
    uid = ''
    new_url = ''
    try:
        aid, p_num, b23vid = self.extract_link(self=self, link=link)
        if b23vid:
            # b23.tv short link: recover the av number from the page content
            aid_match = re.search(self.AID_MATCH_REGEX, content)
            aid = 'av' + aid_match.group(1)
            new_url = f"https://www.bilibili.com/video/{aid}?p=1"
            p_num = 1
            uid = 'bilibili:%s-1' % aid
        else:
            new_url = link
            uid = self.unique_id(self=self, link=link)
        aid = aid[2:]  # remove the 'av' prefix
        # basic video info
        api_url = f'http://api.bilibili.com/x/web-interface/view?aid={aid}'
        async with aiohttp.ClientSession() as session:
            async with session.get(api_url) as resp:
                api_content = await resp.json()
        code = api_content['code']
        if code != 0 or 'data' not in api_content:
            raise Exception(f'api request failed, message:\n{api_content}')
        data = api_content['data']
        thumbnailURL = data['pic']
        title = data['title']
        desc = data['desc']
        uploadDate = datetime.fromtimestamp(data['pubdate']).astimezone(timezone.utc)
        # uploader-side tags
        api_url = f'http://api.bilibili.com/x/tag/archive/tags?aid={aid}'
        async with aiohttp.ClientSession() as session:
            async with session.get(api_url) as resp:
                api_content = await resp.json()
        code = api_content['code']
        if code != 0 or 'data' not in api_content:
            utags = []
        else:
            utags = [item['tag_name'] for item in api_content['data']]
        if 'staff' in data:
            user_space_urls = ['https://space.bilibili.com/%d' % x['mid'] for x in data['staff']]
        elif 'owner' in data:
            user_space_urls = ['https://space.bilibili.com/%d' % data['owner']['mid']]
        # part list, to resolve the requested P number
        cid = 0
        async with aiohttp.ClientSession() as session:
            async with session.get(f'https://api.bilibili.com/x/player/pagelist?aid={aid}&jsonp=jsonp') as resp:
                api_content = await resp.text()
                if resp.status == 200:
                    api_obj = loads(api_content)
                    num_parts = len(api_obj['data'])
                    if p_num < 1 or p_num > num_parts:
                        raise UserError(f'P number out of range, should be in [1, {num_parts}]')
                    part_name = api_obj['data'][p_num - 1]['part']
                    cid = api_obj['data'][p_num - 1]['cid']
                else:
                    raise Exception(f'api request failed, message:\n{api_content}')
    except UserError as ex:
        raise ex
    except Exception:
        # anything else means the video is gone or the API changed: return a placeholder
        return makeResponseSuccess({
            'thumbnailURL': '',
            'title': '已失效视频',  # "this video is no longer available"
            'desc': '已失效视频',
            'site': 'bilibili',
            'uploadDate': datetime.now(),
            "unique_id": uid,
            "utags": [],
            "url_overwrite": new_url,
            "placeholder": True
        })
    return makeResponseSuccess({
        'thumbnailURL': thumbnailURL,
        'title': title,
        'desc': desc,
        'site': 'bilibili',
        'uploadDate': uploadDate,
        "unique_id": uid,
        "utags": utags,
        "url_overwrite": new_url,
        "user_space_urls": user_space_urls,
        'extra': {'part_name': part_name, 'cid': cid}
    })
def _check_language(self, language):
    if language not in VALID_LANGUAGES:
        raise UserError('UNRECOGNIZED_LANGUAGE')
def add_category(self, category, color='#000', user='', session=None):
    cat = self.db.cats.find_one({'name': category}, session=session)
    if cat is not None:
        raise UserError("CATEGORY_ALREADY_EXIST")
    self.db.cats.insert_one({
        'name': category,
        'count': 0,
        'color': color,
        'meta': {'created_by': user, 'created_at': datetime.now()}
    }, session=session)
def createOrModifyAuthorRecord(user, author_type, tagid, common_tags, user_spaces, desc, avatar_file_key=None):
    filterOperation('createOrModifyAuthorRecord', user)
    log(obj={
        'author_type': author_type,
        'tagid': tagid,
        'common_tags': common_tags,
        'user_spaces': user_spaces,
        'desc': desc,
        'avatar_file_key': avatar_file_key
    })
    if author_type not in ['individual', 'group']:
        raise UserError('INCORRECT_AUTHOR_TYPE')
    if not isinstance(user_spaces, list):
        raise UserError('INCORRECT_REQUEST_USER_SPACES')
    user_space_ids = createUserSpaceIds(user_spaces)
    if len(desc) > AuthorDB.DESC_MAX_LENGTH:
        raise UserError('DESC_TOO_LONG')
    with MongoTransaction(client) as s:
        tag_obj = tagdb._tag(tagid, session=s())
        if tag_obj['category'] != 'Author':
            raise UserError('TAG_NOT_AUTHOR')
        existing_record = None
        log(obj={'tag_obj': tag_obj})
        avatar_file = ''
        if 'author' in tag_obj:
            existing_record = db.authors.find_one({'_id': tag_obj['author']}, session=s())
            assert existing_record
            avatar_file = existing_record['avatar']
            log(obj={'old_record': existing_record})
        common_tagids = tagdb.filter_and_translate_tags(common_tags, session=s())
        if avatar_file_key:
            if avatar_file_key.startswith("upload-image-"):
                filename = rdb.get(avatar_file_key)
                if filename:
                    avatar_file = filename.decode('ascii')
        if existing_record:
            record_id = existing_record['_id']
            db.authors.update_one({'_id': record_id}, {'$set': {
                'type': author_type,
                'tagid': tagid,
                'common_tagids': common_tagids,
                'urls': user_spaces,
                'user_space_ids': user_space_ids,
                'desc': desc,
                'avatar': avatar_file
            }}, session=s())
        else:
            record_id = db.authors.insert_one({
                'type': author_type,
                'tagid': tagid,
                'common_tagids': common_tagids,
                'urls': user_spaces,
                'user_space_ids': user_space_ids,
                'desc': desc,
                'avatar': avatar_file
            }, session=s()).inserted_id
        record_id = ObjectId(record_id)
        db.tags.update_one({'_id': tag_obj['_id']}, {'$set': {'author': record_id}})
        s.mark_succeed()
        return str(record_id)
def add_or_rename_tag(self, tag_name, new_tag_name, language, user='', session=None):
    self._check_language(language)
    tag_obj = self._tag(tag_name, session=session)
    new_tag_alias_obj = self.db.tag_alias.find_one({'tag': new_tag_name}, session=session)
    if new_tag_alias_obj is not None and new_tag_alias_obj['dst'] != tag_obj['_id']:
        raise UserError('TAG_ALREADY_EXIST')
    if new_tag_alias_obj is None:
        if not isinstance(tag_name, int):
            rc, lang_referenced = self._get_tag_name_reference_count(tag_name, tag_obj)
            assert rc > 0
            # if it is only referenced once AND it is exactly referenced by the given language,
            # then it is a unique rename operation, so we have to delete the old name
            if rc == 1 and lang_referenced == language:
                self.db.tag_alias.update_one({'tag': tag_name}, {'$set': {
                    'tag': new_tag_name,
                    'meta.modified_by': user,
                    'meta.modified_at': datetime.now()
                }}, session=session)
                self.aci.DeleteWord(tag_name)
                self.aci.AddWord([(tag_obj['id'], new_tag_name, language)])
            else:
                self.db.tag_alias.insert_one({
                    'tag': new_tag_name,
                    'dst': tag_obj['_id'],
                    'meta': {'created_by': user, 'created_at': datetime.now(), 'modified_by': user, 'modified_at': datetime.now()}
                }, session=session)
                self.aci.AddWord([(tag_obj['id'], new_tag_name, language)])
        else:
            self.db.tag_alias.insert_one({
                'tag': new_tag_name,
                'dst': tag_obj['_id'],
                'meta': {'created_by': user, 'created_at': datetime.now(), 'modified_by': user, 'modified_at': datetime.now()}
            }, session=session)
            self.aci.AddWord([(tag_obj['id'], new_tag_name, language)])
    else:
        # since a tag_alias already exists for new_tag_name, there is no need to insert a new one,
        # but we need to consider whether or not to delete the old one
        if not isinstance(tag_name, int):
            rc, lang_referenced = self._get_tag_name_reference_count(tag_name, tag_obj)
            assert rc > 0
            if lang_referenced is None:
                # we have an alias with the same name
                raise UserError('TAG_ALREADY_EXIST')
            # delete ONLY IF it is referenced exactly once AND it is exactly referenced by the given language
            if rc == 1 and lang_referenced == language and tag_name != new_tag_name:
                self.db.tag_alias.delete_one({'tag': tag_name}, session=session)
                self.aci.DeleteWord(tag_name)
        else:
            rc, lang_referenced = self._get_tag_name_reference_count(new_tag_name, tag_obj)
            assert rc > 0
            if lang_referenced is None:
                # we have an alias with the same name
                raise UserError('TAG_ALREADY_EXIST')
    # add or update the tag name for the given language
    self.db.tags.update_one({'_id': tag_obj['_id']}, {'$set': {
        f'languages.{language}': new_tag_name,
        'meta.modified_by': user,
        'meta.modified_at': datetime.now()
    }}, session=session)
def removeSubScription(user, sub_id):
    obj = db.subs.find_one({'_id': ObjectId(sub_id)})
    if obj is None:
        raise UserError('SUB_NOT_EXIST')
    db.subs.delete_one({'_id': ObjectId(sub_id)})
def verify_tags(self, tags, session=None):
    found_tags = self.db.tag_alias.find({'tag': {'$in': tags}}, session=session)
    tm = [tag['tag'] for tag in found_tags]
    for tag in tags:
        if tag not in tm:
            raise UserError('TAG_NOT_EXIST', tag)
def _verifyFolderName(name):
    if '/' in name or '\\' in name or '*' in name:
        raise UserError('INVALID_PATH')
    return True
def update_item(self, item_id, item, user='', session=None):
    # look up under a different name so the `item` argument being written is not clobbered
    item_obj = self.db.items.find_one({'_id': ObjectId(item_id)}, session=session)
    if item_obj is None:
        raise UserError('ITEM_NOT_EXIST')
    self.db.items.update_one({'_id': ObjectId(item_id)}, {'$set': {
        'item': item,
        'meta.modified_by': user,
        'meta.modified_at': datetime.now()
    }}, session=session)
def addComment(user, thread_id: ObjectId, text: str):  # user can add comments
    # TODO: notify the user being replied to
    filterOperation('postComment', user)
    l = len(text)
    if l > Comments.MAX_COMMENT_LENGTH_LONG:
        raise UserError('COMMENT_TOO_LONG')
    elif l > Comments.MAX_COMMENT_LENGTH_REGULAR and not filterOperation('postLongComment', user, raise_exception=False):
        raise UserError('COMMENT_TOO_LONG')
    thread_obj = db.comment_threads.find_one({'_id': thread_id})
    if thread_obj is None:
        raise UserError('THREAD_NOT_EXIST')
    with redis_lock.Lock(rdb, "thread:" + str(thread_id)), MongoTransaction(client) as s:
        cid = str(db.comment_items.insert_one({
            'thread': thread_id,
            'content': text,
            'hidden': False,
            'deleted': False,
            'upvotes': 0,
            'downvotes': 0,
            'meta': makeUserMetaObject(user)
        }, session=s()).inserted_id)
        db.comment_threads.update_one({'_id': thread_id}, {'$inc': {'count': int(1)}}, session=s())
        note_obj = {
            "cid": ObjectId(cid),
            "replied_by": makeUserMeta(user),
            "content": text[:Comments.NOTIFICATION_CONTENT_LENGTH]
        }
        # ===========================================================
        # resolve which object (video, playlist or user) owns this thread,
        # caching the result on the thread document for next time
        if 'obj_type' in thread_obj and 'obj_id' in thread_obj:
            note_obj['replied_type'] = thread_obj['obj_type']
            note_obj['replied_obj'] = thread_obj['obj_id']
        else:
            obj = db.items.find_one({'comment_thread': thread_id}, session=s())
            if obj:
                note_obj['replied_type'] = 'video'
                note_obj['replied_obj'] = obj['_id']
                db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'video', 'obj_id': obj['_id']}}, session=s())
            else:
                obj = db.playlists.find_one({'comment_thread': thread_id}, session=s())
                if obj:
                    note_obj['replied_type'] = 'playlist'
                    note_obj['replied_obj'] = obj['_id']
                    db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'playlist', 'obj_id': obj['_id']}}, session=s())
                else:
                    obj = db.users.find_one({'comment_thread': thread_id}, session=s())
                    if obj:
                        note_obj['replied_type'] = 'user'
                        note_obj['replied_obj'] = obj['_id']
                        db.comment_threads.update_one({'_id': thread_id}, {'$set': {'obj_type': 'user', 'obj_id': obj['_id']}}, session=s())
                    else:
                        log(level='ERR', obj={'msg': 'orphan thread found!!', 'thread_id': thread_id, 'thread_obj': thread_obj})
                        raise UserError('UNKNOWN_ERROR')
        # ===========================================================
        createNotification('comment_reply', thread_obj['owner'], session=s(), other=note_obj)
        s.mark_succeed()
    return cid
def remove_tag_group(self, group_name, user='', session=None):
    g_obj = self.db.groups.find_one({'name': group_name}, session=session)
    if g_obj is None:
        raise UserError('GROUP_NOT_EXIST')
    # pymongo's deprecated remove() replaced with delete_one(); group names are unique
    self.db.groups.delete_one({'name': group_name}, session=session)
def getPlaylist(pid):
    ret = db.playlists.find_one({'_id': ObjectId(pid)})
    if not ret:
        raise UserError('PLAYLIST_NOT_EXIST')
    return ret
def list_tag_group(self, group_name, session=None):
    g_obj = self.db.groups.find_one({'name': group_name}, session=session)
    if g_obj is None:
        raise UserError('GROUP_NOT_EXIST')
    return g_obj['tags']
def listCommonTagIDs(pid, user):
    playlist = db.playlists.find_one({'_id': ObjectId(pid)})
    if playlist is None:
        raise UserError('PLAYLIST_NOT_EXIST')
    if playlist['private']:
        filterOperation('viewPrivatePlaylist', user, playlist)
    result = db.playlist_items.aggregate([
        {"$match": {"pid": ObjectId(pid)}},
        {"$lookup": {"from": "items", "localField": "vid", "foreignField": "_id", "as": "video"}},
        {"$project": {"video.tags": 1}},
        {"$unwind": {"path": "$video"}},
        {"$project": {"tags": {'$filter': {'input': '$video.tags', 'as': 'tag', 'cond': {'$lt': ['$$tag', 0x80000000]}}}}},
        {"$group": {"_id": 0, "tags": {"$push": "$tags"}, "initialTags": {"$first": "$tags"}}},
        {"$project": {"commonTags": {"$reduce": {"input": "$tags", "initialValue": "$initialTags", "in": {"$setIntersection": ["$$value", "$$this"]}}}}}
    ])
    ret = [i for i in result]
    if ret:
        return ret[0]['commonTags']
    else:
        return []
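# Illustrative sketch (plain Python, not the aggregation itself): what the
# $reduce/$setIntersection pipeline above computes, assuming each video contributes
# its list of tag ids and ids >= 0x80000000 (index words) are dropped first.
def common_tags_sketch(videos_tags):
    filtered = [{t for t in tags if t < 0x80000000} for tags in videos_tags]
    if not filtered:
        return []
    common = filtered[0]
    for tags in filtered[1:]:
        common &= tags
    return sorted(common)

# e.g. two videos sharing only tag 42 (made-up ids)
assert common_tags_sketch([[42, 7, 0x80000001], [42, 9]]) == [42]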
def ajax_notes_send_dm(rd, user, data):
    filterOperation('sendDM', user)
    if len(data.content) > 65536:
        raise UserError('CONTENT_TOO_LONG')
    createDirectMessage(user['_id'], ObjectId(data.dst_user), other={'content': data.content})