def process_media():
    """Queue subtitle-search worker jobs for videos with stale subtitles.

    Picks videos whose 'updated_subs' is missing or older than the largest
    configured update delta, oldest first, and stops once WORKERS_LIMIT
    jobs have been queued.
    """
    root = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'
    spec = {
        'type': 'video',
        'files': {'$exists': True},
        '$or': [
            {'updated_subs': {'$exists': False}},
            {'updated_subs': {'$lt': datetime.utcnow() - DELTA_UPDATE_DEF[-1][1]}},
        ],
    }
    queued = 0
    for media in Media.find(spec, sort=[('updated_subs', ASCENDING)]):
        # Only media with at least one file under the video root qualifies.
        if not any(f.startswith(root) for f in media['files']):
            continue
        if not validate_media(media):
            continue
        target = '%s.workers.subtitles.search_subtitles' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(media['_id'],), timeout=TIMEOUT_SEARCH)
        queued += 1
        if queued == WORKERS_LIMIT:
            return
def list_media(type, skip, limit):
    """Return a serialized page of items for one listing type.

    :param type: one of 'media', 'release', 'search', 'similar'
    :param skip: number of documents to skip (pagination offset)
    :param limit: maximum number of documents to return

    Category, free-text query and sort order come from the request
    query string.
    """
    cache = {}
    spec = {}

    category = request.args.get('category')
    if category:
        # Search documents carry the category directly; media and
        # releases nest it under the info subdocument.
        key = 'category' if type in ('search', 'similar') else 'info.subtype'
        spec[key] = category

    query = request.args.get('query')
    if query:
        spec.update(_get_search_spec(query))

    sort_arg = request.args.get('sort', 'date')
    if sort_arg == 'name':
        sort = [('name', ASCENDING)]
    elif sort_arg == 'rating':
        sort = [('rating', DESCENDING)]
    else:
        sort = [('date', DESCENDING), ('created', DESCENDING)]

    params = {'sort': sort, 'skip': skip, 'limit': limit}
    items = []
    if type in ('media', 'release'):
        # Media and Release listings share the same shape; only the
        # model class differs.
        model = Media if type == 'media' else Release
        for res in model.find(spec, **params):
            search = model.get_search(res)
            items.append(_get_object(res, type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'search':
        for res in Search.find(spec, **params):
            items.append(_get_object(res, type=type,
                    has_search=True,
                    has_similar=_has_similar(cache, res)))
    elif type == 'similar':
        for res in SimilarSearch.find(spec, **params):
            items.append(_get_object(res, type=type, has_similar=True))
    return serialize({'result': items})
def update_path():
    """Synchronize Media file references with the filesystem.

    Scans media_root for files (skipping excluded subtrees) and registers
    them, then prunes stored references to files that no longer exist or
    are now excluded.  Media documents left with neither files nor urls
    are removed.  Records the completion time under the worker's name.
    """
    paths = Settings.get_settings('paths')
    excl = paths['media_root_exclude']
    # Guard against an empty exclusion list: '|'.join([]) would produce
    # the pattern r'^()/', which matches every absolute path and would
    # wrongly skip every new file and prune every stored reference.
    # Use a never-matching pattern instead.
    if excl:
        re_excl = re.compile(r'^(%s)/' % '|'.join(
                [re.escape(p.rstrip('/')) for p in excl]))
    else:
        re_excl = re.compile(r'(?!)')

    for file in iter_files(str(paths['media_root'])):
        if not re_excl.search(file):
            Media.add_file(file)
            # Throttle so the scan does not hammer the database.
            time.sleep(.05)

    for media in Media.find({'files': {'$exists': True}}, timeout=False):
        files_orig = media['files'][:]
        # Iterate a copy so removal does not disturb the iteration.
        for file in files_orig:
            if not os.path.exists(file) or re_excl.search(file):
                media['files'].remove(file)
        if not media['files'] and not media.get('urls'):
            # Nothing references this media anymore; drop the document.
            Media.remove({'_id': media['_id']}, safe=True)
        elif media['files'] != files_orig:
            Media.save(media, safe=True)

    Work.set_info(NAME, 'updated', datetime.utcnow())
def update_media():
    """Refresh each media document's 'date' from its files' mtimes.

    Documents whose files yield no mtime are left untouched.
    """
    spec = {'files': {'$exists': True}}
    for media in Media.find(spec, timeout=False):
        mtime = get_mtime(media['files'])
        if not mtime:
            continue
        Media.update({'_id': media['_id']},
                {'$set': {'date': mtime}}, safe=True)