def _search_url(self):
    '''Try to resolve this search to a direct URL on netflix.

    Returns True when a matching title was found (and, for movies, the
    search itself removed), False otherwise.
    '''
    # Throttle: skip if a URL search already ran within DELTA_URL_SEARCH.
    date = self.session['last_url_search']
    if date and date > datetime.utcnow() - DELTA_URL_SEARCH:
        return False
    if self.category not in NETFLIX_CATEGORIES:
        return False
    # Netflix lookups require credentials from the settings store.
    netflix_ = Settings.get_settings('netflix')
    if not netflix_['username'] or not netflix_['password']:
        return False
    netflix = get_netflix_object(netflix_['username'], netflix_['password'])
    if not netflix:
        return False
    res = netflix.get_info(self.name, self.category)
    if res:
        Media.add_url(url=res['url'], name=res['title'], category=self.category)
        logger.info('found "%s" on netflix (%s)', res['title'], res['url'])
        # Only movie searches are removed once a URL is found; other
        # categories are kept (presumably to keep matching new episodes
        # -- TODO confirm against the tv/anime workflow).
        if self.category == 'movies':
            MSearch.remove({'_id': self._id}, safe=True)
            logger.info('removed %s search "%s": found url %s', self.category, self.name, res['url'])
        return True
    # No result: stamp the attempt so the throttle above applies next time.
    MSearch.update({'_id': self._id}, {'$set': {'session.last_url_search': datetime.utcnow()}}, safe=True)
    return False
def _search_file(self):
    '''Look for already available files matching this search.

    Returns True (after removing the search) when matching files exist;
    otherwise records the attempt timestamp and returns None.
    '''
    if self.mode == 'ever':
        return
    # Throttle: skip if a file search already ran within DELTA_FILE_SEARCH.
    last = self.session['last_file_search']
    if last and last > datetime.utcnow() - DELTA_FILE_SEARCH:
        return
    matches = Media.search(**self)
    found_files = [f for doc in matches for f in doc.get('files', [])]
    if matches and found_files:
        # Incremental searches spawn the follow-up episode search.
        if self.mode == 'inc':
            self._add_next('episode')
        src = self.get('src')
        if src:
            ids = [doc['_id'] for doc in matches]
            Media.update({'_id': {'$in': ids}},
                    {'$set': {'src': src}}, safe=True)
        MSearch.remove({'_id': self._id}, safe=True)
        logger.info('removed %s search "%s": found files %s',
                self.category, self._get_query(), found_files)
        return True
    MSearch.update({'_id': self._id},
            {'$set': {'session.last_file_search': datetime.utcnow()}},
            safe=True)
def update_search():
    '''Update an existing search document from the JSON request payload.'''
    data = request.json
    if not data.get('_id'):
        return jsonify(error='missing id')
    search_id = ObjectId(data['_id'])
    # All three fields are mandatory; reject with a field-specific error.
    for field in ('name', 'category', 'mode'):
        if not data.get(field):
            return jsonify(error='missing %s' % field)
    info = {
        'name': data['name'],
        'category': data['category'],
        'langs': data.get('langs') or [],
        'mode': data['mode'],
        'safe': data.get('safe', True),
        'session': {},    # reset the session so the search runs afresh
        }
    if data['category'] == 'music':
        album = data.get('album')
        if not album:
            return jsonify(error='missing album')
        info['album'] = album
    for attr in ('season', 'episode'):
        raw = data.get(attr)
        info[attr] = int(raw) if raw else None
    Search.update({'_id': search_id}, {'$set': info}, safe=True)
    return jsonify(result=True)
def reset_search():
    '''Clear a search's session state so it gets processed from scratch.'''
    data = request.json
    search_id = data.get('id')
    if not search_id:
        return jsonify(error='missing id')
    Search.update({'_id': ObjectId(search_id)},
            {'$set': {'session': {}}}, safe=True)
    return jsonify(result=True)
def _is_obsolete(self):
    '''Remove this search when it stayed fruitless for too long.

    'inc' and 'ever' searches never expire. Returns True when the
    search was removed.
    '''
    if self.mode in ('inc', 'ever'):
        return
    # Age is measured from the last result, or from the very first
    # search when no result ever came in.
    reference = self.session['last_result'] or self.session['first_search']
    if not reference:
        return
    if reference < datetime.utcnow() - DELTA_OBSOLETE:
        MSearch.remove({'_id': self._id}, safe=True)
        logger.info('removed search "%s" (no result for %d days)',
                self._get_query(), DELTA_OBSOLETE.days)
        return True
def update_search_safe():
    '''Toggle the "safe" flag on a search and reset its session.'''
    data = request.json
    if not data.get('id'):
        return jsonify(error='missing id')
    # Keep the ObjectId conversion before the safe-flag check so a
    # malformed id fails the same way as before.
    search_id = ObjectId(data['id'])
    safe = data.get('safe')
    if not isinstance(safe, bool):
        return jsonify(error='invalid safe value')
    Search.update({'_id': search_id},
            {'$set': {'safe': safe, 'session': {}}}, safe=True)
    return jsonify(result=True)
def save(self):
    '''Update the session bookkeeping timestamps and persist the search.'''
    now = datetime.utcnow()
    session = self.session
    if not session['first_search']:
        session['first_search'] = now
    if session['nb_results']:
        session['last_result'] = now
    if session['nb_downloads']:
        session['last_download'] = now
    # last_search is only advanced when at most one error occurred --
    # presumably so erroring searches sort earlier and get retried
    # sooner (TODO confirm against process_searches' ordering).
    if session['nb_errors'] <= 1:
        session['last_search'] = now
    session['nb_processed'] += 1
    MSearch.save(self, safe=True)
def _get_object_search(id, type):
    '''Build a search dict from an existing media, release or search.

    Returns the search dict (with the source object's 'extra' data
    attached) or None when the object does not exist.
    '''
    obj = None
    search = None
    if type in ('media', 'release'):
        # Media and Release expose the same get/get_search interface.
        model = Media if type == 'media' else Release
        obj = model.get(id)
        if obj:
            search = model.get_search(obj)
    elif type == 'search':
        obj = Search.get(id)
        if obj:
            search = {
                'name': obj['name'],
                'category': obj['category'],
                'search_id': id,
                }
            if obj.get('album'):
                search['album'] = obj['album']
    if obj and search:
        search['extra'] = obj.get('extra', {})
    return search
def create_media():
    '''Create a search or a direct transfer from the JSON payload.

    Two entry modes are visible here:
    - 'id' present: derive the search from an existing media/release/
      search object (via _get_object_search) and register it.
    - type == 'url': queue a transfer of `name` straight into the
      finished-download directory.
    '''
    data = request.json
    type = data.get('type')
    if not type:
        return jsonify(error='missing type')
    langs = data.get('langs') or []
    if 'id' in data:
        if not data.get('mode'):
            return jsonify(error='missing mode')
        id = ObjectId(data['id'])
        search = _get_object_search(id, type)
        if not search:
            return jsonify(error='%s %s does not exist' % (type, id))
        search['langs'] = langs
        search['mode'] = data['mode']
        # Searches derived from an existing object default to unsafe.
        search['safe'] = False
        if not Search.add(**search):
            return jsonify(error='failed to create search %s' % search)
        return jsonify(result=True)
    name = data.get('name')
    if not name:
        return jsonify(error='missing name')
    if type == 'url':
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(name, dst)
        # NOTE(review): python 2 except syntax; needs "as e" on python 3.
        except Exception, e:
            return jsonify(error='failed to create transfer: %s' % str(e))
def add_search(**search):
    '''Register a new search unless equivalent media already exists.

    Returns True when the search was created.
    '''
    # TODO: find algorithm for unsafe searches
    # if search['category'] in ['anime', 'tv']:
    #     search['safe'] = False
    search['safe'] = False
    if _media_exists(**search):
        return
    if Search.add(**search):
        logger.info('added search %s', search)
        return True
def _has_search(cache, obj): if 'searches' not in cache: cache['searches'] = list(Search.find()) for res in cache['searches']: if res['name'] == obj['name'] \ and res['category'] == obj['category'] \ and res.get('season') == obj.get('season') \ and res.get('episode') == obj.get('episode') \ and res.get('album') == obj.get('album'): return True return False
def remove_media():
    '''Delete media, search or similar-search documents by id.

    Expects a JSON payload with 'ids' (a single id or a list) and
    'type' ('media', 'search' or 'similar'). For media, the underlying
    files are removed from disk as well.
    '''
    data = request.json
    ids = data.get('ids')
    if not ids:
        return jsonify(error='missing ids')
    if not isinstance(ids, (tuple, list)):
        ids = [ids]
    spec = {'_id': {'$in': [ObjectId(i) for i in ids]}}
    type = data.get('type')
    if type == 'media':
        for id in ids:
            # Explicit loop instead of map(): map is a lazy iterator on
            # python 3, so the remove_file calls would never run.
            for base in Media.get_bases(id):
                remove_file(base)
        Media.remove(spec)
    elif type == 'search':
        Search.remove(spec)
    elif type == 'similar':
        SimilarSearch.remove(spec)
    else:
        return jsonify(error='unknown type %s' % type)
    return jsonify(result=True)
def list_media(type, skip, limit):
    '''List media/release/search/similar documents for the client.

    Filtering (category, free-text query) and sort order come from the
    request query string; pagination from skip/limit.
    '''
    cache = {}
    spec = {}
    category = request.args.get('category')
    if category:
        # Searches store the category at the top level; media and
        # releases nest it under info.subtype.
        if type in ('search', 'similar'):
            spec['category'] = category
        else:
            spec['info.subtype'] = category
    query = request.args.get('query')
    if query:
        spec.update(_get_search_spec(query))
    sort = request.args.get('sort', 'date')
    if sort == 'name':
        sort = [('name', ASCENDING)]
    elif sort == 'rating':
        sort = [('rating', DESCENDING)]
    else:
        # Default: newest first, breaking ties on creation time.
        sort = [('date', DESCENDING), ('created', DESCENDING)]
    params = {'sort': sort, 'skip': skip, 'limit': limit}
    items = []
    if type == 'media':
        for res in Media.find(spec, **params):
            search = Media.get_search(res)
            items.append(_get_object(res, type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'release':
        for res in Release.find(spec, **params):
            search = Release.get_search(res)
            items.append(_get_object(res, type=type,
                    has_search=_has_search(cache, search),
                    has_similar=_has_similar(cache, search)))
    elif type == 'search':
        for res in Search.find(spec, **params):
            # A search document trivially "has" a search.
            items.append(_get_object(res, type=type,
                    has_search=True,
                    has_similar=_has_similar(cache, res)))
    elif type == 'similar':
        for res in SimilarSearch.find(spec, **params):
            items.append(_get_object(res, type=type, has_similar=True))
    return serialize({'result': items})
def process_searches():
    '''Queue worker jobs for pending searches, oldest-searched first.

    Stops once WORKERS_LIMIT jobs have been queued.
    '''
    # The worker target is loop-invariant; build it once.
    target = '%s.workers.search.process_search' % settings.PACKAGE_NAME
    queued = 0
    for doc in MSearch.find(
            sort=[('session.last_search', ASCENDING)]):
        search = Search(doc)
        if not search.validate():
            continue
        get_factory().add(target=target, args=(search._id,),
                timeout=TIMEOUT_SEARCH)
        queued += 1
        if queued == WORKERS_LIMIT:
            break
except Exception, e: logger.error('failed to create transfer for %s: %s', query, str(e)) return -1 count = 1 else: try: searches = get_searches(query) except QueryError, e: logger.info(str(e)) return -1 count = len(searches) if not searches: logger.info('no result for query "%s', query) else: for search in searches: if Search.add(**search): logger.info('created search %s', search) return count def process_file_queries(body): '''Process the file queries and return a list of patterns and replacement strings. ''' res = [] tree = html.fromstring(body) for element in tree.cssselect('body p'): line = FileLine(element) query = line.get_query() if query is None: continue count = process_query(query)
def process_search(search_id):
    '''Load a search by id, run it and persist the updated session.'''
    doc = MSearch.get(search_id)
    if not doc:
        return
    search = Search(doc)
    search.process()
    search.save()
def _get_query(self):
    '''Return the human-readable query string for this search
    (delegates to MSearch.get_query).'''
    return MSearch.get_query(self)
def _add_next(self, mode):
    '''Create a search for the next episode or season.

    `mode` selects which counter to advance ('episode' or 'season');
    nothing happens when no follow-up search can be derived.
    '''
    next_search = MSearch.get_next(self, mode=mode)
    if not next_search:
        return
    if MSearch.add(**next_search):
        logger.info('added search %s', next_search)
search = { 'name': clean(name, 1), 'category': type, 'mode': data['mode'], 'langs': langs, 'safe': False, } if type == 'music': search['album'] = data.get('album') if not search['album']: return jsonify(error='missing album') if type in ('tv', 'anime'): for attr in ('season', 'episode'): val = data.get(attr) search[attr] = int(val) if val else None if not Search.add(**search): return jsonify(error='failed to create search %s' % search) return jsonify(result=True) @app.route('/media/create/similar', methods=['POST', 'OPTIONS']) @crossdomain(origin='*') def create_similar(): data = request.json if not data.get('recurrence'): return jsonify(error='missing recurrence') if 'id' in data: id = ObjectId(data['id']) type = data.get('type') search = _get_object_search(id, type)