def queue():
    """Queue a background run of the crawler named in the request payload.

    Admin-only. Expects a JSON body with a ``crawler_id`` key and an
    optional ``incremental`` flag; responds 400 when no exposed crawler
    matches the given id.
    """
    authz.require(authz.is_admin())
    payload = request_data()
    crawler_id = payload.get('crawler_id')
    for crawler in get_exposed_crawlers():
        if crawler_id != crawler.get_id():
            continue
        # Matched: dispatch the task asynchronously and report success.
        run_incremental = bool(payload.get('incremental', False))
        execute_crawler.delay(crawler_id, incremental=run_incremental)
        return jsonify({'status': 'queued'})
    return jsonify({'status': 'error', 'message': 'No such crawler'}, status=400)
def index():
    """Return every exposed crawler together with a total count.

    Admin-only listing endpoint; materializes the crawler iterable so
    the same sequence can be both serialized and counted.
    """
    authz.require(authz.is_admin())
    exposed = list(get_exposed_crawlers())
    return jsonify({'results': exposed, 'total': len(exposed)})
def index():
    """Return a paginated listing of all exposed crawlers.

    Admin-only; wraps the crawler list in a ``Pager`` with a page size
    of 20 before serialization.
    """
    authz.require(authz.is_admin())
    exposed = list(get_exposed_crawlers())
    return jsonify(Pager(exposed, limit=20))
def index():
    """Return a paginated listing of all exposed crawlers, sorted by name.

    Admin-only (enforced via the request-scoped authz object). Crawlers
    are ordered by their ``CRAWLER_NAME`` attribute and wrapped in a
    ``Pager`` with a page size of 20 before serialization.
    """
    request.authz.require(request.authz.is_admin)
    # sorted() already returns a fresh list; the original wrapped it in a
    # redundant list() call, which has been removed.
    crawlers = sorted(get_exposed_crawlers(), key=lambda c: c.CRAWLER_NAME)
    return jsonify(Pager(crawlers, limit=20))