def update(self, data):
    """Apply notification/invoiceURL changes in *data* to an order and
    mirror the payload into the Redis cache.

    :param data: dict with '_id' plus optional 'notification' and
        'invoiceURL' keys.

    Fixed defects: the redis lock is now always released (``finally``),
    only ``Exception`` is caught instead of a bare ``except``, the builtin
    ``id`` is no longer shadowed, the duplicated Mongo-update code paths
    are unified, and ``print`` is the Py3 function.
    """
    order_id = data['_id']
    cache_hit = conn.get(order_id) is not None
    if cache_hit:
        # Cache entry already exists: refresh it up front (original behavior).
        conn.set(order_id, json.dumps(data))
    lock = redis_lock.Lock(conn, order_id)
    lock.acquire(blocking=True)
    try:
        order = Orders.objects.get({'_id': ObjectId(order_id)})
        order.notification = data.get("notification", order.notification)
        order.invoiceURL = data.get("invoiceURL", order.invoiceURL)
        order.save()
        if not cache_hit:
            # Populate the cache only after Mongo accepted the write.
            conn.set(order_id, json.dumps(data))
    except Exception:
        print('error in updating orders collection')
    finally:
        lock.release()
def createFolder(user, root, name, privateView=False, privateEdit=True):
    """Create sub-folder *name* under *root* for *user*.

    Holds the parent-folder edit lock while verifying *root* exists, then
    the new folder's own lock plus a Mongo transaction while inserting it.

    :raises UserError: FOLDER_ALREADY_EXIST when the path is taken.
    """
    filterOperation('createFolder', user, root)
    _verifyPath(root)
    _verifyFolderName(name)
    owner_key = str(makeUserMeta(user))
    with redis_lock.Lock(rdb, f"folderEdit:{owner_key}:{root}"):
        # Raises if the parent folder does not exist.
        _findFolder(user, root)
        fullpath = root + name + "/"
        with redis_lock.Lock(rdb, f"folderEdit:{owner_key}:{fullpath}"), MongoTransaction(client) as s:
            if _findFolder(user, fullpath, raise_exception=False):
                raise UserError('FOLDER_ALREADY_EXIST')
            new_folder = {
                'user': makeUserMeta(user),
                'leaf': False,
                'playlist': None,
                'name': name,
                'path': fullpath,
                'privateView': privateView,
                'privateEdit': privateEdit,
                'meta': makeUserMetaObject(user)
            }
            db.playlist_folders.insert_one(new_folder, session=s())
            s.mark_succeed()
def get_locks(cls, redis_client, name, auto_renewal, expiry_seconds):
    """Build the three locks used by the read/write coordination protocol.

    :return: (global_lock, counter_lock, write_waiting_lock)
    """
    def _lock(suffix):
        # All three locks share the '{class}/{name}{suffix}__locked' scheme.
        return redis_lock.Lock(
            redis_client=redis_client,
            name='{0}/{1}{2}__locked'.format(cls.__name__, name, suffix),
            expire=expiry_seconds,
            id=None,
            auto_renewal=auto_renewal,
            strict=True)

    # confirmation lock guarantees consistency when writing
    global_lock = _lock('')
    # counter lock guarantees consistency when accessing the read counter
    counter_lock = _lock('_read_counter')
    # write-waiting lock enforces priority of writes over reads
    write_waiting_lock = _lock('_write_pending')
    return global_lock, counter_lock, write_waiting_lock
def reader_lock(redis_t, lock_name):
    """Reader side of a redis-backed readers/writer lock (generator body,
    intended for use as a context manager).

    The first reader in acquires the write lock; the last reader out
    resets it. A short-lived "-rlock" protects the reader counter.
    """
    lock_name = f"{C.provider_uri}:{lock_name}"
    rlock = redis_lock.Lock(redis_t, "%s-rlock" % lock_name)
    wlock = redis_lock.Lock(redis_t, "%s-wlock" % lock_name)
    reader_key = "%s-reader" % lock_name
    # make sure only one reader is entering
    rlock.acquire(timeout=60)
    try:
        readers = redis_t.get(reader_key)
        if readers is None or int(readers) == 0:
            # first reader blocks writers
            wlock.acquire()
        redis_t.incr(reader_key)
    finally:
        rlock.release()
    try:
        yield
    finally:
        # make sure only one reader is leaving
        rlock.acquire(timeout=60)
        try:
            redis_t.decr(reader_key)
            if int(redis_t.get(reader_key)) == 0:
                redis_t.delete(reader_key)
                wlock.reset()
        finally:
            rlock.release()
def createPlaylistFromCopies(pid, site, user):
    """Create a new playlist mirroring playlist *pid*, substituting each
    video with its copy hosted on *site*.

    :param pid: source playlist id
    :param site: target hosting site (one of the five supported sites)
    :param user: acting user, passed through to permission-checked helpers
    :return: id of the newly created playlist
    :raises UserError: UNSUPPORTED_SITE for an unknown *site*
    """
    if site not in ["youtube", "bilibili", "nicovideo", "twitter", "acfun"]:
        raise UserError("UNSUPPORTED_SITE")
    videos, _, playlist_obj = listAllPlaylistVideosOrdered(pid, user)
    # New playlist inherits cover/privacy; title gets " - <site>" appended.
    new_pid = createPlaylist('english', playlist_obj['title']['english'] + ' - %s' % site, playlist_obj['desc']['english'], playlist_obj['cover'], user, playlist_obj['private'])
    with redis_lock.Lock(rdb, 'editLink'), redis_lock.Lock(rdb, "playlistEdit:" + str(new_pid)), MongoTransaction(client) as s:
        rank = 0
        for video in videos:
            copies = video['item']['item']['copies']
            for cp in copies:
                item = tagdb.retrive_item(cp, session=s())
                # Take the first copy on the requested site that is not the
                # source video itself; ranks stay in source-playlist order.
                if item['_id'] != video['vid'] and item['item']['site'] == site:
                    addVideoToPlaylistLockFree(new_pid, item['_id'], user, rank, session=s())
                    rank += 1
                    break
        s.mark_succeed()
    return new_pid
def reader_lock(redis_t, lock_name: str):
    """Reader side of a redis-backed readers/writer lock (generator body).

    The first reader acquires the write lock via ``CacheUtils.acquire``;
    the last reader deletes the counter and resets the write lock.
    """
    rlock = redis_lock.Lock(redis_t, f"{lock_name}-rlock")
    wlock = redis_lock.Lock(redis_t, f"{lock_name}-wlock")
    reader_key = f"{lock_name}-reader"
    # make sure only one reader is entering
    rlock.acquire(timeout=60)
    try:
        readers = redis_t.get(reader_key)
        if readers is None or int(readers) == 0:
            # first reader blocks writers
            CacheUtils.acquire(wlock, lock_name)
        redis_t.incr(reader_key)
    finally:
        rlock.release()
    try:
        yield
    finally:
        # make sure only one reader is leaving
        rlock.acquire(timeout=60)
        try:
            redis_t.decr(reader_key)
            if int(redis_t.get(reader_key)) == 0:
                redis_t.delete(reader_key)
                wlock.reset()
        finally:
            rlock.release()
def requestSubtitleOCR(user, vid: ObjectId):
    """Queue (or re-queue) an OCR request for video *vid*.

    States in ``db.subtitle_ocr.status``: Queuing, Reserved (implied by the
    final else), RecordExists, NoRecord/RecordOutOfDate/Error.

    :raises UserError: VIDEO_NOT_FOUND, RECORD_ALREADY_EXISTS, or
        VIDEO_BEING_PROCESSED depending on the current record state.
    """
    # step 1: verify user and video
    filterOperation('requestSubtitleOCR', user)
    video_item = tagdb.retrive_item(vid)
    if video_item is None:
        raise UserError('VIDEO_NOT_FOUND')
    # step 2: check if request exists
    with redis_lock.Lock(rdb, "videoEdit:" + video_item['item']['unique_id']), redis_lock.Lock(rdb, "mmdocr_global_lock"), MongoTransaction(client) as s:
        ocr_record = db.subtitle_ocr.find_one({"vid": vid}, session=s())
        if ocr_record is None:
            # create new record
            record = {
                "vid": vid,
                "status": "Queuing",
                "version": 0,  # version is set in postSubtitleOCRResult
                "worker_id": "",  # worker_id is set in queryAndProcessQueuingRequests
                "meta": makeUserMetaObject(user)
            }
            db.subtitle_ocr.insert_one(record, session=s())
        else:
            status = ocr_record['status']
            record_id = ocr_record['_id']
            record_version = ocr_record['version']
            mmdocr_version = int(Config.MMDOCR_VERSION)
            if status in ['NoRecord', 'RecordOutOfDate', 'Error']:
                # NOTE(review): 'NoRecord' is listed above yet asserted
                # impossible here — presumably it never occurs in the DB;
                # confirm against the writer side.
                assert status != "NoRecord"
                # re-queue a stale/failed request
                db.subtitle_ocr.update_one({"_id": record_id}, {
                    "$set": {
                        "status": "Queuing",
                        "meta.modified_at": datetime.utcnow(),
                        "meta.modified_by": ObjectId(user['_id'])
                    }
                }, session=s())
                pass
            elif status == 'RecordExists':
                if record_version < mmdocr_version:
                    # newer version of mmdocr exists: re-run OCR
                    db.subtitle_ocr.update_one({"_id": record_id}, {
                        "$set": {
                            "status": "Queuing",
                            "meta.modified_at": datetime.utcnow(),
                            "meta.modified_by": ObjectId(user['_id'])
                        }
                    }, session=s())
                    pass
                else:
                    raise UserError('RECORD_ALREADY_EXISTS')
            else:
                # any other status means a worker currently owns it
                raise UserError('VIDEO_BEING_PROCESSED')
        s.mark_succeed()
def runk3(k3):
    """Run the indexing job for *k3* at most once across workers.

    The redis key *k3* acts as a job marker: missing/unparsable means
    "not started" (this worker claims it and runs ``index``), ``1`` means
    another worker already claimed it. Finally the marker is set to the
    completion sentinel.

    Fixed defect: the bare ``except`` also swallowed redis connection
    errors, silently re-running the job; only the expected parse failures
    (``get`` returning None or a non-numeric value) are caught now.
    """
    with redis_lock.Lock(r_cache, '{0}-r-w-lock'.format(k3)):
        try:
            if int(r_cache.get(k3)) == 1:
                return
        except (TypeError, ValueError):
            # Key missing (get() -> None) or non-numeric: claim the job.
            r_cache.set(k3, 1)
            index(k3)
    # Mark completion under a fresh acquisition of the same lock.
    with redis_lock.Lock(r_cache, '{0}-r-w-lock'.format(k3)):
        r_cache.set(k3, '结束')
def translateVTT(subid: ObjectId, language: str, translator: str):
    """Translate a stored VTT subtitle to *language*, with a per-subtitle
    translation cache keyed on (subid, lang, translator).

    :param subid: id of the document in ``db.subtitles``
    :param translator: 'googletrans' or 'baidutrans'
    :return: translated content (from cache when fresh)
    :raises UserError: ITEM_NOT_FOUND, ONLY_VTT_SUPPORTED, or
        UNSUPPORTED_TRANSLATOR
    """
    sub_obj = db.subtitles.find_one({'_id': subid})
    if sub_obj is None:
        raise UserError('ITEM_NOT_FOUND')
    if sub_obj['format'] != 'vtt':
        raise UserError('ONLY_VTT_SUPPORTED')
    with redis_lock.Lock(rdb, "subtitleEdit:" + str(subid)), MongoTransaction(client) as s:
        cache = db.subtitle_translation_cache.find_one(
            {
                "subid": subid,
                "lang": language,
                "translator": translator
            }, session=s())
        # Cache is stale when the subtitle was modified after the cached version.
        if cache is None or cache['version'] < sub_obj['meta']['modified_at']:
            # cache miss
            vtt = webvtt.read_buffer(io.StringIO(sub_obj['content']))
            if translator == 'googletrans':
                result = translate_google(vtt, language)
            elif translator == 'baidutrans':
                # baidu translation is serialised globally behind its own lock
                with redis_lock.Lock(rdb, "lock-baidutrans"):
                    result = translate_baidu(vtt, language)
            else:
                raise UserError('UNSUPPORTED_TRANSLATOR')
            if cache is None:
                db.subtitle_translation_cache.insert_one(
                    {
                        'subid': subid,
                        'translator': translator,
                        'lang': language,
                        'version': sub_obj['meta']['modified_at'],
                        'content': result
                    }, session=s())
            else:
                db.subtitle_translation_cache.update_one(
                    {'_id': cache['_id']}, {
                        '$set': {
                            'version': sub_obj['meta']['modified_at'],
                            'content': result
                        }
                    }, session=s())
            s.mark_succeed()
            return result
        else:
            # cache hit
            return cache['content']
def postSubtitleOCRResult(user, unique_id: str, content: str, subformat: str,
                          version: int, worker_id: str):
    """Store an OCR worker's subtitle result for the video identified by
    *unique_id*, replacing any previous auto-generated subtitles, and mark
    the OCR record as completed.

    :return: inserted subtitle document id
    :raises UserError: INVALID_SUBTITLE_FORMAT or VIDEO_NOT_FOUND
    """
    # step 1: verify and post
    filterOperation('subtitleocr_postSubtitleOCRResult', user)
    subformat = subformat.lower()
    if subformat not in VALID_SUBTITLE_FORMAT:
        raise UserError('INVALID_SUBTITLE_FORMAT')
    video_item = tagdb.retrive_item({"item.unique_id": unique_id})
    if video_item is None:
        raise UserError('VIDEO_NOT_FOUND')
    try:
        size = len(content.encode('utf-8'))
    except:
        # best-effort size; -1 flags "unknown" rather than failing the post
        size = -1
    with redis_lock.Lock(rdb, "videoEdit:" + video_item['item']['unique_id']), redis_lock.Lock(rdb, "mmdocr_global_lock"), MongoTransaction(client) as s:
        # delete old versions
        db.subtitles.delete_many({
            'vid': video_item['_id'],
            'autogen': True
        }, session=s())
        subid = db.subtitles.insert_one(
            {
                'vid': video_item['_id'],
                'lang': 'UNKNOWN',
                'format': subformat,
                'content': content,
                'size': size,
                'deleted': False,
                'version': version,
                'autogen': True,
                'meta': makeUserMetaObject(None)
            }, session=s()).inserted_id
        # step 2: update subtitle_ocr
        db.subtitle_ocr.update_one({"vid": video_item['_id']}, {
            "$set": {
                "status": "RecordExists",
                "version": version,
                "worker_id": worker_id
            }
        }, session=s())
        s.mark_succeed()
    return subid
def updatePlaylistInfo(pid, language, title, desc, cover, user, private=False):
    """Update a playlist's localized title/desc, privacy flag and,
    optionally, its cover.

    :raises UserError: on length-limit violations, empty title/desc,
        or PLAYLIST_NOT_EXIST.
    """
    log(obj={'title': title, 'desc': desc, 'cover': cover, 'private': private})
    # Validate inputs before touching locks or the database.
    if len(title) > PlaylistConfig.MAX_TITLE_LENGTH:
        raise UserError('TITLE_TOO_LONG')
    if len(desc) > PlaylistConfig.MAX_DESC_LENGTH:
        raise UserError('DESC_TOO_LONG')
    if cover and len(cover) > PlaylistConfig.MAX_COVER_URL_LENGTH:
        raise UserError('URL_TOO_LONG')
    if not title:
        raise UserError('EMPTY_TITLE')
    if not desc:
        raise UserError('EMPTY_DESC')
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s:
        playlist = db.playlists.find_one({'_id': ObjectId(pid)})
        log(obj={'playlist': playlist})
        if playlist is None:
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('editPlaylist', user, playlist)
        if cover:
            # Cover is only overwritten when a non-empty one was supplied.
            db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {"cover": cover}}, session=s())
        db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
            "title.%s" % language: title,
            "desc.%s" % language: desc,
            "private": private,
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()}}, session=s())
        s.mark_succeed()
def rlock(redis_client, lock_id, holder_id=None):
    """
    A reentrant lock(ish). Reacquiring with the holder_id that currently
    holds the lock succeeds instead of blocking.

    Parameters
    ----------
    redis_client : redis.StrictRedis
        Client for a redis
    lock_id : str
        Identifier of the lock to try and acquire
    holder_id : bytes
        Identifier of the holder, defaults to `get_ident()`

    Notes
    -----
    Not a true reentrant lock: n acquisitions by one holder need only one
    release, and only the first holder can release.
    """
    holder_id = f"{get_ident()}".encode() if holder_id is None else holder_id
    logger.debug(f"My lock holder id is {holder_id}")
    current_owner = redis_lock.Lock(redis_client, lock_id, id=holder_id).get_owner_id()
    logger.debug(f"Getting lock id {lock_id}. Currently held by {current_owner}")
    try:
        with redis_lock.Lock(redis_client, lock_id, id=holder_id):
            yield
    except AlreadyAcquired:
        # We already hold it (same holder_id): proceed without re-locking.
        yield
    logger.debug(f"{holder_id} released lock id {lock_id}.")
def get_access_token():
    """Return the access token, serialising concurrent refreshes behind a
    short-lived, auto-renewing redis lock."""
    cache_key = 'access_token'
    refresh_lock = redis_lock.Lock(current_app.redis, cache_key, expire=10, auto_renewal=True)
    with refresh_lock:
        return _get_access_token(cache_key)
def set_lock(database):
    """
    Create — but do not acquire — the shared lock object for the database.

    :param database: redis client/connection the lock is stored in
    :return: a ``redis_lock.Lock`` named "lock72"; the caller is
        responsible for acquiring and releasing it
    """
    return redis_lock.Lock(database, "lock72")
def addPlaylistsToFolder(user, path, playlists) :
    """Add leaf entries for each playlist id in *playlists* to the folder
    at *path*, skipping missing, inaccessible-private, and duplicate
    playlists, then bump the folder's modified metadata.
    """
    _verifyPath(path)
    with redis_lock.Lock(rdb, f"folderEdit:{str(makeUserMeta(user))}:{path}"), MongoTransaction(client) as s :
        folder_obj = _findFolder(user, path)
        filterOperation('addPlaylistsToFolder', user, folder_obj)
        for pid in playlists :
            playlist = playlist_db.retrive_item(pid, session = s())
            if playlist is None :
                continue # skip non-exist playlist
            if playlist['item']['private'] and not filterOperation('viewPrivatePlaylist', user, playlist, raise_exception = False) :
                continue # skip other's private playlist
            # Leaf path encodes the playlist id; backslashes mark a leaf node.
            playlist_path = path + "\\" + str(playlist['_id']) + "\\/"
            if _findFolder(user, playlist_path, raise_exception = False) :
                continue # skip duplicated playlist
            # Leaf entries inherit the folder's privacy flags.
            playlist_obj = {
                'user': makeUserMeta(user),
                'leaf': True,
                'playlist': playlist['_id'],
                'name': None,
                'path': playlist_path,
                'privateView': folder_obj['privateView'],
                'privateEdit': folder_obj['privateEdit'],
                'meta': makeUserMetaObject(user)
            }
            db.playlist_folders.insert_one(playlist_obj, session = s())
        db.playlist_folders.update_one({'_id': folder_obj['_id']}, {'$set': {
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()
        }}, session = s())
        s.mark_succeed()
def _create_lock_object(self, key):
    """Build (without acquiring) an auto-renewing redis lock for *key*.

    Kept as its own method so tests can stub lock creation.
    """
    expiration = self.settings['REDIS_LOCK_EXPIRATION']
    return redis_lock.Lock(self.lock_redis_conn, key,
                           expire=expiration,
                           auto_renewal=True)
def queryAndProcessQueuingRequests(user, max_videos: int, worker_id: str): filterOperation('subtitleocr_queryAndProcessQueuingRequests', user) # step 1: max_videos > 0 and max_videos <= 100 if max_videos <= 0 or max_videos > Subtitles.MAX_WORKER_JOBS: raise UserError('TOO_MANY_JOBS') with redis_lock.Lock(rdb, "mmdocr_global_lock"), MongoTransaction(client) as s: # step 2: get top k oldest requests ret = list( db.subtitle_ocr.find({ "status": "Queuing" }, session=s()).sort([("meta.modified_at", 1) ]).limit(max_videos)) # FIFO ret_vids = [i['vid'] for i in ret] ret_ids = [i['_id'] for i in ret] # step 3: retrive video URLs video_items = tagdb.retrive_items({"_id": { "$in": ret_vids }}, session=s()) video_urls = [{ "url": i["item"]["url"], "unique_id": i["item"]["unique_id"] } for i in video_items] # step 4: mark reserved db.subtitle_ocr.update_many({"_id": { "$in": ret_ids }}, {"$set": { "status": "Reserved", "worker_id": worker_id }}, session=s()) s.mark_succeed() # step 5: return return video_urls
def removeVideoFromPlaylist(pid, vid, page, page_size, user):
    """Remove video *vid* from playlist *pid*, closing the rank gap and
    decrementing the playlist's video count.

    *page* and *page_size* are currently unused (kept for interface
    compatibility with callers).

    :raises UserError: PLAYLIST_NOT_EXIST, EMPTY_PLAYLIST, or
        VIDEO_NOT_EXIST_OR_NOT_IN_PLAYLIST
    """
    log(obj={'pid': pid, 'vid': vid})
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s:
        playlist = db.playlists.find_one({'_id': ObjectId(pid)}, session=s())
        if playlist is None:
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('editPlaylist', user, playlist)
        if not playlist["videos"] > 0:
            raise UserError('EMPTY_PLAYLIST')
        entry = db.playlist_items.find_one({"pid": ObjectId(pid), "vid": ObjectId(vid)}, session=s())
        if entry is None:
            raise UserError('VIDEO_NOT_EXIST_OR_NOT_IN_PLAYLIST')
        # Shift every later entry up one rank, then drop the entry itself.
        db.playlist_items.update_many({'pid': ObjectId(pid), 'rank': {'$gt': entry['rank']}}, {'$inc': {'rank': int(-1)}}, session=s())
        db.playlist_items.delete_one({'_id': entry['_id']}, session=s())
        db.playlists.update_one({"_id": ObjectId(pid)}, {"$inc": {"videos": int(-1)}}, session=s())
        db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()}}, session=s())
        s.mark_succeed()
def editPlaylist_MoveUp(pid, vid, page, page_size, user) :
    """Swap video *vid* with the entry ranked immediately above it in
    playlist *pid*. No-op (returns None) when already at rank 0.

    *page* and *page_size* are unused here; the paging code is commented out.

    :raises UserError: PLAYLIST_NOT_EXIST, EMPTY_PLAYLIST, or
        VIDEO_NOT_EXIST_OR_NOT_IN_PLAYLIST
    """
    log(obj = {'pid': pid, 'vid': vid})
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s :
        playlist = db.playlists.find_one({'_id': ObjectId(pid)}, session = s())
        if playlist is None :
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('editPlaylist', user, playlist)
        if playlist["videos"] > 0 :
            entry = db.playlist_items.find_one({"pid": ObjectId(pid), "vid": ObjectId(vid)}, session = s())
            if entry is None :
                s.mark_failover()
                raise UserError('VIDEO_NOT_EXIST_OR_NOT_IN_PLAYLIST')
            if entry['rank'] <= 0 :
                # Already at the top.
                # NOTE(review): returns without mark_succeed/mark_failover —
                # presumably the transaction rolls back as a no-op; confirm
                # MongoTransaction's default on unmarked exit.
                return None
            # Swap ranks with the entry directly above.
            exchange_entry = db.playlist_items.find_one({"pid": ObjectId(pid), "rank": int(entry['rank'] - 1)}, session = s())
            db.playlist_items.update_one({'_id': entry['_id']}, {'$set': {'rank': int(entry['rank'] - 1)}}, session = s())
            db.playlist_items.update_one({'_id': exchange_entry['_id']}, {'$set': {'rank': int(entry['rank'])}}, session = s())
            db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
                'meta.modified_by': makeUserMeta(user),
                'meta.modified_at': datetime.now()}}, session = s())
            #video_page, video_count = listPlaylistVideos(pid, page - 1, page_size, user)
            s.mark_succeed()
            #return {'videos': video_page, 'video_count': video_count, 'page': page}
        else :
            raise UserError('EMPTY_PLAYLIST')
def watch_channels():
    """Refresh the slack channel list for every configured primary bot,
    then reschedule itself via gevent.

    Guarded by both a rate-limit check (_is_allowed_to_run) and a
    distributed lock so only one worker runs the refresh at a time.
    """
    try:
        redis_client = omniredis.get_redis_client(decode_responses=False)
        last_run_key = "watch:channels:last_run_datetime"
        if not _is_allowed_to_run(redis_client, last_run_key):
            return
        statsd = stats.get_statsd_client()
        with redis_lock.Lock(redis_client, 'watch_channels', expire=LOCK_EXPIRATION, auto_renewal=True):
            with statsd.timer('watch.channels'):
                for team_name, bot_name in settings.PRIMARY_SLACK_BOT.items():
                    logger.info(
                        'Updating slack channel list.',
                        extra={
                            'team': team_name,
                            'bot': bot_name
                        },
                    )
                    team = Team.get_team_by_name(team_name)
                    bot = Bot.get_bot_by_name(team, bot_name)
                    slack.update_channels(bot)
                # Record the successful run for the rate-limit check above.
                redis_client.set(last_run_key, datetime.now().isoformat())
    except Exception:
        logger.exception('Failed to update slack channel list.', exc_info=True)
    finally:
        # NOTE(review): 'return' inside 'finally' overrides any in-flight
        # return/exception; here it guarantees rescheduling on every path.
        return gevent.spawn_later(settings.WATCHER_SPAWN_WAIT_TIME_IN_SEC, watch_channels)
def schedule(task_file, debug, rabbitmq, redis_url, expire):
    """Load jobs from the YAML *task_file* and run a blocking scheduler.

    When *redis_url* is given, the scheduler only runs while holding a
    process-wide redis lock, so a single instance is active at a time.

    :raises ClickException: on invalid YAML or missing config keys
    """
    try:
        config = yaml.safe_load(task_file)
    except yaml.YAMLError as e:
        raise ClickException(f"Yaml task file is invalid: {e}")
    if rabbitmq:
        set_broker(RabbitmqBroker(url=rabbitmq))
    if debug:
        logging.getLogger().setLevel(logging.DEBUG)
        logging.getLogger("pika").setLevel(logging.CRITICAL)
    scheduler = BlockingScheduler(timezone=utc)
    try:
        add_all_jobs(scheduler, config["jobs"])
    except KeyError as e:
        raise ClickException(
            f"Config file missing required parameter: {e.args}")
    if not redis_url:
        # No coordination requested: run unconditionally.
        scheduler.start()
        return
    conn = redis.Redis.from_url(redis_url)
    with redis_lock.Lock(conn, LOCK_NAME, id=PROCESS_KEY, expire=expire, auto_renewal=True):
        scheduler.start()
def new_lock(self, key, **params):
    """Create a new lock bound to this manager's redis client.

    Only the 'expire' and 'auto_renewal' keyword arguments are forwarded
    to the lock; anything else in *params* is ignored.
    """
    forwarded = ('expire', 'auto_renewal')
    opts = {name: params[name] for name in forwarded if name in params}
    return redis_lock.Lock(self.client, name=key, **opts)
def addToPlaylist(user, pid: ObjectId, text: str, use_bleach=True):
    """Post comment *text* on playlist *pid*, creating its comment thread
    on first use.

    :param use_bleach: sanitize the comment text (forwarded to addComment)
    :return: (thread_id, comment_id)
    :raises UserError: PLAYLIST_NOT_EXIST

    Fixed defect: *use_bleach* was dropped on the thread-creation path, so
    the very first comment on a playlist was posted with the default
    sanitization regardless of the caller's choice.
    """
    filterOperation('postComment', user)
    playlist_obj = playlist_db.retrive_item(pid)
    if playlist_obj is None:
        raise UserError('PLAYLIST_NOT_EXIST')
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)):
        if 'comment_thread' in playlist_obj:
            cid = addComment(user, playlist_obj['comment_thread'], text, use_bleach=use_bleach)
            return playlist_obj['comment_thread'], cid
        # First comment: create the thread transactionally, then post.
        with MongoTransaction(client) as s:
            tid = createThread('playlist', playlist_obj['_id'], playlist_obj['meta']['created_by'], session=s())
            playlist_db.update_item_query(
                playlist_obj, {'$set': {
                    'comment_thread': tid
                }}, session=s())
            s.mark_succeed()
        cid = addComment(user, tid, text, use_bleach=use_bleach)
        return tid, cid
def addToVideo(user, vid: ObjectId, text: str, use_bleach=True):
    """Post comment *text* on video *vid*, creating its comment thread on
    first use.

    :param use_bleach: sanitize the comment text (forwarded to addComment)
    :return: (thread_id, comment_id)
    :raises UserError: VIDEO_NOT_EXIST

    Fixed defect: *use_bleach* was dropped on the thread-creation path, so
    the very first comment on a video was posted with the default
    sanitization regardless of the caller's choice.
    """
    filterOperation('postComment', user)
    video_obj = db.videos.find_one({'_id': vid})
    if video_obj is None:
        raise UserError('VIDEO_NOT_EXIST')
    with redis_lock.Lock(rdb, "videoEdit:" + video_obj["item"]["unique_id"]):
        if 'comment_thread' in video_obj:
            cid = addComment(user, video_obj['comment_thread'], text, use_bleach=use_bleach)
            return video_obj['comment_thread'], cid
        # First comment: create the thread transactionally, then post.
        with MongoTransaction(client) as s:
            tid = createThread('video', video_obj['_id'], video_obj['meta']['created_by'], session=s())
            db.videos.update_one({'_id': vid}, {'$set': {
                'comment_thread': tid
            }}, session=s())
            s.mark_succeed()
        cid = addComment(user, tid, text, use_bleach=use_bleach)
        return tid, cid
def get_bet_number(game_serial, amount):
    """Atomically draw *amount* distinct numbers from the pool stored in
    redis under *game_serial* and remove them from the pool.

    :return: list of drawn numbers on success, or an int error code:
        3000 = pool empty, 3001 = fewer than *amount* numbers left,
        3002 = pool missing or lock not acquired.

    Fixed defect: the 3000/3001 early returns left the 'bet_number' lock
    held until its 1s expiry; the lock is now released on every path via
    try/finally.
    """
    redis_tool = RedisTools()
    if redis_tool.exists(game_serial):
        lock = redis_lock.Lock(redis_tool.redis_conn, 'bet_number', expire=1)
        if lock.acquire():
            try:
                array_full = json.loads(redis_tool.get(game_serial))
                if len(array_full) <= 0:
                    return 3000
                if len(array_full) < amount:
                    return 3001
                award_number_arr = random.sample(array_full, amount)
                # Remove the drawn numbers from the remaining pool.
                remaining = list(set(array_full).difference(set(award_number_arr)))
                if len(remaining) <= 0:
                    redis_tool.delete(game_serial)
                else:
                    redis_tool.set(game_serial, json.dumps(remaining))
                return award_number_arr
            finally:
                lock.release()
    return 3002
def renameFolder(user, path, new_name) :
    """Rename the folder at *path* to *new_name*, rewriting the stored
    path of every descendant entry to match.

    :return: the new full path (parent + new_name + '/')
    :raises UserError: INVALID_PATH or FOLDER_ALREADY_EXIST
    """
    _verifyPath(path)
    _verifyFolderName(new_name)
    if path == "/" :
        raise UserError('INVALID_PATH')
    with redis_lock.Lock(rdb, f"folderEdit:{str(makeUserMeta(user))}:{path}"), MongoTransaction(client) as s :
        folder_obj = _findFolder(user, path)
        filterOperation('renameFolder', user, folder_obj)
        parent_path, cur_folder = _parentPath(path)
        # Backslash marks a leaf (playlist) entry, which cannot be renamed.
        if '\\' in cur_folder :
            raise UserError('INVALID_PATH')
        if db.playlist_folders.find_one({'user': makeUserMeta(user), 'path': parent_path + new_name + '/'}) :
            raise UserError('FOLDER_ALREADY_EXIST')
        parent_path_escaped = re.escape(parent_path)
        cur_folder_esacped = re.escape(cur_folder)
        # Matches the folder itself plus all descendants.
        query_regex = f'^{parent_path_escaped}{cur_folder_esacped}\\/.*'
        replace_regex = re.compile(f'^({parent_path_escaped})({cur_folder_esacped})(\\/.*)')
        paths = db.playlist_folders.find({'user': makeUserMeta(user), 'path': {'$regex': query_regex}}, session = s())
        # Rename the folder document itself (path ending exactly at '/').
        db.playlist_folders.update_one({'user': makeUserMeta(user), 'path': {'$regex': f'^{parent_path_escaped}{cur_folder_esacped}\\/$'}}, {'$set': {'name': new_name}}, session = s())
        # NOTE(review): new_name_escaped is computed but never used; the
        # substitution below inserts new_name literally — confirm names
        # containing regex/backreference characters are rejected upstream.
        new_name_escaped = re.escape(new_name)
        for p in paths :
            new_path = replace_regex.sub(rf'\g<1>{new_name}\g<3>', p['path'])
            db.playlist_folders.update_one({'_id': p['_id']}, {'$set': {'path': new_path}}, session = s())
        db.playlist_folders.update_one({'user': makeUserMeta(user), 'path': {'$regex': query_regex}}, {'$set': {
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()
        }}, session = s())
        s.mark_succeed()
    return parent_path + new_name + '/'
def changeFolderAccess(user, path, privateView, privateEdit, recursive=True):
    """Set the view/edit privacy flags on the folder at *path*, and on its
    whole subtree when *recursive* is true, then bump the folder's
    modified metadata."""
    _verifyPath(path)
    with redis_lock.Lock(rdb, f"folderEdit:{str(makeUserMeta(user))}:{path}"), MongoTransaction(client) as s:
        folder_obj = _findFolder(user, path)
        filterOperation('changeFolderAccess', user, folder_obj)
        path_escaped = re.escape(path)
        # Prefix match covers descendants; exact match covers only the folder.
        query_regex = f'^{path_escaped}.*' if recursive else f'^{path_escaped}$'
        db.playlist_folders.update_many(
            {'user': makeUserMeta(user), 'path': {'$regex': query_regex}},
            {'$set': {'privateView': privateView, 'privateEdit': privateEdit}},
            session=s())
        db.playlist_folders.update_one(
            {'_id': folder_obj['_id']},
            {'$set': {
                'meta.modified_by': makeUserMeta(user),
                'meta.modified_at': datetime.now()
            }},
            session=s())
        s.mark_succeed()
def insertIntoPlaylist(pid, vid, rank, user) :
    """Insert video *vid* into playlist *pid* at position *rank*.

    If the video is already in the playlist it is moved to *rank* instead
    of duplicated. Also records *pid* in the video's 'series' list.

    :raises UserError: PLAYLIST_NOT_EXIST, VIDEO_NOT_EXIST,
        VIDEO_LIMIT_EXCEEDED, or OUT_OF_RANGE
    """
    log(obj = {'pid': pid, 'vid': vid, 'rank': rank})
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s :
        playlist = db.playlists.find_one({'_id': ObjectId(pid)}, session = s())
        if playlist is None :
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('editPlaylist', user, playlist)
        if tagdb.retrive_item({'_id': ObjectId(vid)}, session = s()) is None :
            raise UserError('VIDEO_NOT_EXIST')
        if playlist["videos"] > PlaylistConfig.MAX_VIDEO_PER_PLAYLIST :
            raise UserError('VIDEO_LIMIT_EXCEEDED')
        conflicting_item = db.playlist_items.find_one({'pid': ObjectId(pid), 'vid': ObjectId(vid)}, session = s())
        if conflicting_item is not None :
            # Already present: reposition instead of inserting a duplicate.
            editPlaylist_MoveLockFree(pid, conflicting_item, rank, session = s())
            db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
                'meta.modified_by': makeUserMeta(user),
                'meta.modified_at': datetime.now()}}, session = s())
            s.mark_succeed()
            return
        if rank < 0 :
            raise UserError('OUT_OF_RANGE')
        if rank > playlist['videos'] :
            # Clamp to append-at-end.
            rank = int(playlist['videos'])
        # Register this playlist in the video's 'series' list (deduplicated).
        playlists = tagdb.retrive_item({'_id': ObjectId(vid)}, session = s())['item']['series']
        playlists.append(ObjectId(pid))
        playlists = list(set(playlists))
        tagdb.update_item_query(ObjectId(vid), {'$set': {'item.series': playlists}}, makeUserMeta(user), session = s())
        db.playlists.update_one({"_id": ObjectId(pid)}, {"$inc": {"videos": int(1)}}, session = s())
        # Shift everything at or after the insertion point down one rank.
        db.playlist_items.update_many({'pid': ObjectId(pid), 'rank': {'$gte': rank}}, {'$inc': {'rank': int(1)}}, session = s())
        db.playlist_items.insert_one({"pid": ObjectId(pid), "vid": ObjectId(vid), "rank": int(rank), "meta": makeUserMeta(user)}, session = s())
        db.playlists.update_one({'_id': ObjectId(pid)}, {'$set': {
            'meta.modified_by': makeUserMeta(user),
            'meta.modified_at': datetime.now()}}, session = s())
        s.mark_succeed()
def updatePlaylistCoverVID(pid, vid, page, page_size, user):
    """Set playlist *pid*'s cover to the cover image of video *vid*.

    *page* and *page_size* are unused (kept for caller compatibility).

    :raises UserError: PLAYLIST_NOT_EXIST or VIDEO_NOT_EXIST
    """
    log(obj={'pid': pid, 'vid': vid})
    with redis_lock.Lock(rdb, "playlistEdit:" + str(pid)), MongoTransaction(client) as s:
        playlist_obj = db.playlists.find_one({'_id': ObjectId(pid)})
        log(obj={'playlist': playlist_obj})
        if playlist_obj is None:
            raise UserError('PLAYLIST_NOT_EXIST')
        filterOperation('editPlaylist', user, playlist_obj)
        # Resolve the video through the user-visibility filter.
        video_obj = filterSingleVideo(vid, user)
        if video_obj is None:
            raise UserError('VIDEO_NOT_EXIST')
        db.playlists.update_one(
            {'_id': ObjectId(pid)},
            {'$set': {"cover": video_obj['item']['cover_image']}},
            session=s())
        db.playlists.update_one(
            {'_id': ObjectId(pid)},
            {'$set': {
                'meta.modified_by': makeUserMeta(user),
                'meta.modified_at': datetime.now()
            }},
            session=s())
        s.mark_succeed()
def get_app_html_list(version):
    """
    Checks if current HTML version matches the passed one, and returns update
    data, if necessary
    @param version: passed from client version string
    @type version: basestring
    @return: JSON-compatible dict, if version is outdated. None if the
        version is up to date
    @rtype: dict or None
    """
    if version is None:
        return None
    conn = settings.REDIS
    # if cached version matches the sent one, don't update
    # ('None' is the sentinel stored when no HTML exists yet — see below)
    ver_cached = conn.get(models.DB_HTML_VERSION)
    if (ver_cached and ver_cached == version) or ver_cached == 'None':
        return None
    res = conn.get(models.DB_HTML_CACHE)
    # if both HTML and version are cached, and version doesn't match - return cached content
    if res is not None and ver_cached is not None and ver_cached != version:
        try:
            return {'version': ver_cached, 'data': json.loads(res)}
        except ValueError as e:
            logger.error(u"Broken JSON in HTML cache: {0}".format(e))
            return None
    # not cached/expired - regenerate cache
    # locked to prevent race conditions
    with redis_lock.Lock(conn, "smsapp-html-content", id="db_owner{0}".format(settings.REDIS_DB)):
        pipe = conn.pipeline(transaction=False)
        logger.debug(u"Regenerating HTML cache at db {0}".format(
            settings.REDIS_DB))
        try:
            opt = models.Option.objects.get(name='html version')
            pipe.setex(models.DB_HTML_VERSION, opt.content, models.DB_CACHE_TIMEOUT)
            # create dialogues
            d = []
            for h in models.AppDialog.objects.all():
                apps = h.apps.splitlines()
                logger.debug(u"Processing app dialog {0}".format(
                    h.description))
                d.append({
                    'html': base64.b64encode(h.html_contents.encode('utf-8')),
                    'packages': apps
                })
            pipe.setex(models.DB_HTML_CACHE, json.dumps(d), models.DB_CACHE_TIMEOUT)
            pipe.execute()
            logger.debug(u"Updated DB {0} cache with {1} values".format(
                settings.REDIS_DB, len(d)))
            if opt.content == version:
                # client already holds the freshly-determined version
                return None
            return {'version': opt.content, 'data': d}
        except models.Option.DoesNotExist:
            # no htmls yet, skipping — cache the 'None' sentinel so later
            # calls short-circuit until the timeout expires
            pipe.setex(models.DB_HTML_VERSION, 'None', models.DB_CACHE_TIMEOUT)
            pipe.execute()
            return None