def process(self):
    """Run this search against the configured plugins and queue downloads.

    Iterates the paged search results, records every hit, and starts a
    transfer for each new, auto-eligible, validated result. Stops after
    the first download unless the search mode is 'ever'.
    """
    query = self._get_query()
    dst = Settings.get_settings('paths')['finished_download']
    logger.info('processing %s search "%s"', self.category, query)
    self._search_url()

    hits = results(query,
            category=self.category,
            sort=self.session['sort_results'],
            pages_max=self.session['pages_max'],
            **self._get_filters(query))
    for result in hits:
        # A falsy entry signals a plugin-side failure for that page/result.
        if not result:
            self.session['nb_errors'] += 1
            continue
        Result.add_result(result, search_id=self._id)
        if not result.auto:
            continue
        if self.safe and not result.safe:
            continue

        # Skip results already known as transfers; match on the torrent
        # hash when we have one, otherwise on the source url.
        if result.get('hash'):
            spec = {'info.hash': result.hash}
        else:
            spec = {'src': result.url}
        if Transfer.find_one(spec):
            continue

        self.session['nb_results'] += 1
        if not self._validate_result(result):
            self.session['nb_pending'] += 1
            continue

        # Incremental searches advance to the next episode once a hit lands.
        if self.mode == 'inc':
            self._add_next('episode')
        transfer_id = Transfer.add(result.url, dst, type=result.type)
        self.transfers.insert(0, transfer_id)
        self.session['nb_downloads'] += 1
        logger.info('found "%s" on %s (%s)',
                result.title, result.plugin, result.url)
        if self.mode != 'ever':
            break
def get_search_results():
    """Return the stored results of a search, grouped by result type.

    Expects a JSON request body containing 'id' (the search ObjectId as a
    hex string); responds with an error payload when it is missing.
    Results with exactly 0 seeds are dropped (unknown seed counts — None —
    pass through). A result holding several urls is expanded into one item
    per url, its title suffixed with "(i/n)".
    """
    data = request.json
    if not data.get('id'):
        return jsonify(error='missing id')
    search_id = ObjectId(data['id'])    # renamed: don't shadow builtin `id`

    items = {}
    for res in Result.find({'search_id': search_id},
            sort=[('date', DESCENDING)]):
        urls = res['url']
        if not isinstance(urls, (tuple, list)):
            urls = [urls]

        # Seed count is per-result, not per-url: check it once, before
        # expanding the url list.
        seeds = res.get('seeds')
        if seeds == 0:    # presumably hides dead torrents — TODO confirm
            continue

        nb_urls = len(urls)
        for i, url in enumerate(urls):
            item = {'url': url, 'size': res['size'], 'seeds': seeds}
            if nb_urls > 1:
                item['title'] = '%s (%s/%s)' % (res['title'], i + 1, nb_urls)
            else:
                item['title'] = res['title']
            # setdefault returns the (possibly new) list — append directly.
            items.setdefault(res['type'], []).append(item)

    return serialize({'result': items})