def create_similar():
    """Create a SimilarSearch document from the JSON request payload.

    Two modes:
      * payload contains 'id' (+ optional 'type'): copy name/category from
        the referenced object's search info;
      * otherwise 'name' and 'category' must be supplied directly.

    'recurrence' (hours) is always required; 'langs' is optional.
    Returns a JSON object with either 'result': True or an 'error' string.
    """
    data = request.json
    if not data.get('recurrence'):
        return jsonify(error='missing recurrence')
    if 'id' in data:
        # Renamed from `id`/`type` to avoid shadowing the builtins.
        obj_id = ObjectId(data['id'])
        obj_type = data.get('type')
        search = _get_object_search(obj_id, obj_type)
        if not search:
            return jsonify(error='%s %s does not exist' % (obj_type, obj_id))
        similar = {
            'name': search['name'],
            'category': search['category'],
            }
    else:
        if not data.get('name'):
            return jsonify(error='missing name')
        if not data.get('category'):
            return jsonify(error='missing category')
        similar = {
            # clean() is a project helper — presumably normalizes the
            # name for matching; second arg's meaning not visible here.
            'name': clean(data['name'], 1),
            'category': data['category'],
            }
    similar['recurrence'] = int(data['recurrence'])
    similar['langs'] = data.get('langs') or []
    if not SimilarSearch.add(**similar):
        return jsonify(error='failed to create similar %s' % similar)
    return jsonify(result=True)
def _has_similar(cache, obj): if 'similar' not in cache: cache['similar'] = list(SimilarSearch.find()) for res in cache['similar']: if res['name'] == obj['name'] \ and res['category'] == obj['category']: return True return False
def update_similar():
    """Update an existing SimilarSearch from the JSON request payload.

    Requires '_id', 'name', 'category' and 'recurrence'; 'langs' is
    optional. Returns JSON with 'result': True or an 'error' string.
    """
    data = request.json
    if not data.get('_id'):
        return jsonify(error='missing id')
    similar_id = ObjectId(data['_id'])
    # Validate the remaining required fields in order; error strings match
    # the field names exactly.
    for field in ('name', 'category', 'recurrence'):
        if not data.get(field):
            return jsonify(error='missing %s' % field)
    update_doc = {
        'name': data['name'],
        'category': data['category'],
        'langs': data.get('langs') or [],
        'recurrence': int(data['recurrence']),
        }
    SimilarSearch.update({'_id': similar_id}, {'$set': update_doc}, safe=True)
    return jsonify(result=True)
def remove_media():
    """Remove one or more media/search/similar documents by id.

    Payload: 'ids' (a single id or a list) and 'type' ('media', 'search'
    or 'similar'). For 'media', the underlying files are deleted first.
    Returns JSON with 'result': True or an 'error' string.
    """
    data = request.json
    ids = data.get('ids')
    if not ids:
        return jsonify(error='missing ids')
    if not isinstance(ids, (tuple, list)):
        ids = [ids]
    spec = {'_id': {'$in': [ObjectId(i) for i in ids]}}
    # `media_type` instead of `type` to avoid shadowing the builtin.
    media_type = data.get('type')
    if media_type == 'media':
        for media_id in ids:
            # BUG FIX: the original used map(remove_file, ...), which under
            # Python 3 builds a lazy iterator that is never consumed, so no
            # file was ever removed. An explicit loop actually deletes them.
            for base in Media.get_bases(media_id):
                remove_file(base)
        Media.remove(spec)
    elif media_type == 'search':
        Search.remove(spec)
    elif media_type == 'similar':
        SimilarSearch.remove(spec)
    else:
        return jsonify(error='unknown type %s' % media_type)
    return jsonify(result=True)
def list_media(type, skip, limit):
    """List objects of the given type ('media', 'release', 'search' or
    'similar') as serialized dicts.

    Query-string parameters:
      * category: filters on 'category' for search/similar types, else on
        'info.subtype';
      * query: free-text search merged into the Mongo spec;
      * sort: 'name', 'rating' or anything else (default: date + created).

    An unknown type yields an empty result list.
    """
    cache = {}
    spec = {}
    category = request.args.get('category')
    if category:
        key = 'category' if type in ('search', 'similar') else 'info.subtype'
        spec[key] = category
    query = request.args.get('query')
    if query:
        spec.update(_get_search_spec(query))
    # Map the requested sort key to a Mongo sort spec; anything
    # unrecognized falls back to newest-first.
    orderings = {
        'name': [('name', ASCENDING)],
        'rating': [('rating', DESCENDING)],
        }
    sort = orderings.get(request.args.get('sort', 'date'),
            [('date', DESCENDING), ('created', DESCENDING)])
    params = {'sort': sort, 'skip': skip, 'limit': limit}
    items = []
    if type in ('media', 'release'):
        # Both models share the same listing shape; only the model differs.
        model = Media if type == 'media' else Release
        for doc in model.find(spec, **params):
            search = model.get_search(doc)
            items.append(_get_object(doc,
                    type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'search':
        for doc in Search.find(spec, **params):
            items.append(_get_object(doc,
                    type=type,
                    has_search=True,
                    has_similar=_has_similar(cache, doc)))
    elif type == 'similar':
        for doc in SimilarSearch.find(spec, **params):
            items.append(_get_object(doc,
                    type=type,
                    has_similar=True))
    return serialize({'result': items})
def process_similars():
    """Queue worker jobs for similar searches that are due for processing.

    Searches are visited least-recently-processed first; one that was
    processed within its own recurrence window is skipped. At most
    WORKERS_LIMIT jobs are queued per invocation.
    """
    queued = 0
    for search in SimilarSearch.find(sort=[('processed', ASCENDING)]):
        last_run = search.get('processed')
        interval = timedelta(hours=search.get('recurrence',
                DEFAULT_RECURRENCE))
        # Skip searches still inside their recurrence window.
        if last_run and last_run > datetime.utcnow() - interval:
            continue
        target = '%s.workers.dig.process_similar' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(search['_id'],), timeout=TIMEOUT_SEEK)
        queued += 1
        if queued == WORKERS_LIMIT:
            break
def process_similar(similar_id):
    """Process one similar search and stamp its 'processed' time.

    A missing document (e.g. removed while queued) is silently ignored.
    """
    search = SimilarSearch.get(similar_id)
    if not search:
        return
    Similar(search).process()
    search['processed'] = datetime.utcnow()
    SimilarSearch.save(search, safe=True)