def _has_similar(cache, obj):
    # Cache the full SimilarSearch collection on first use so repeated
    # lookups within a single call do not hit the database again.
    if 'similar' not in cache:
        cache['similar'] = list(SimilarSearch.find())
    for res in cache['similar']:
        if res['name'] == obj['name'] \
                and res['category'] == obj['category']:
            return True
    return False
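# The companion helper _has_search (used by list_media below) is not shown in
# this section. The following is only a sketch for context, assuming Search
# documents also carry 'name' and 'category' and that the same per-call cache
# dict is reused; the real implementation may differ.
def _has_search(cache, obj):
    if 'search' not in cache:
        cache['search'] = list(Search.find())
    for res in cache['search']:
        if res['name'] == obj['name'] \
                and res['category'] == obj['category']:
            return True
    return False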
def list_media(type, skip, limit):
    cache = {}
    spec = {}

    # Optional category filter: searches store the category at the top level,
    # media and releases store it under 'info.subtype'.
    category = request.args.get('category')
    if category:
        if type in ('search', 'similar'):
            spec['category'] = category
        else:
            spec['info.subtype'] = category

    # Optional free-text query.
    query = request.args.get('query')
    if query:
        spec.update(_get_search_spec(query))

    # Sort order: 'name', 'rating', or the default most-recent-first.
    sort = request.args.get('sort', 'date')
    if sort == 'name':
        sort = [('name', ASCENDING)]
    elif sort == 'rating':
        sort = [('rating', DESCENDING)]
    else:
        sort = [('date', DESCENDING), ('created', DESCENDING)]

    params = {'sort': sort, 'skip': skip, 'limit': limit}
    items = []
    if type == 'media':
        for res in Media.find(spec, **params):
            search = Media.get_search(res)
            items.append(_get_object(res, type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'release':
        for res in Release.find(spec, **params):
            search = Release.get_search(res)
            items.append(_get_object(res, type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'search':
        for res in Search.find(spec, **params):
            items.append(_get_object(res, type=type,
                    has_search=True,
                    has_similar=_has_similar(cache, res)))
    elif type == 'similar':
        for res in SimilarSearch.find(spec, **params):
            items.append(_get_object(res, type=type, has_similar=True))

    return serialize({'result': items})
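# list_media reads request.args, so it is expected to run inside a Flask
# request context. A hypothetical wiring example follows; the app object,
# route name, and URL layout are assumptions, not part of the original source.
# @app.route('/media/<type>/<int:skip>/<int:limit>')
# def media_list_view(type, skip, limit):
#     # e.g. GET /media/release/0/20?category=movies&sort=rating&query=alien
#     return list_media(type, skip, limit)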
def process_similars():
    count = 0
    for search in SimilarSearch.find(sort=[('processed', ASCENDING)]):
        # Skip searches processed more recently than their recurrence delay.
        processed = search.get('processed')
        delta = timedelta(hours=search.get('recurrence', DEFAULT_RECURRENCE))
        if processed and processed > datetime.utcnow() - delta:
            continue

        # Queue a background worker for this similar search.
        target = '%s.workers.dig.process_similar' % settings.PACKAGE_NAME
        get_factory().add(target=target, args=(search['_id'],),
                timeout=TIMEOUT_SEEK)

        count += 1
        if count == WORKERS_LIMIT:
            break
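# The worker target queued above ('<package>.workers.dig.process_similar') is
# defined elsewhere. A minimal sketch of what it is expected to do, given the
# recurrence check in process_similars: load the SimilarSearch document, run
# the search, and refresh its 'processed' timestamp so the document is skipped
# until the recurrence delay elapses again. The pymongo-style find_one()/
# update() calls on the model are assumptions; only '_id' and 'processed'
# appear in the original source.
def process_similar(search_id):
    search = SimilarSearch.find_one({'_id': search_id})
    if not search:
        return
    # ... run the similarity search and store its results ...
    SimilarSearch.update({'_id': search_id},
            {'$set': {'processed': datetime.utcnow()}})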