def update(id):
    """Apply submitted changes to a user record.

    Only the user themselves or an admin may perform the update; anyone
    else is rejected by ``authz.require``.
    """
    user = obj_or_404(User.by_id(id))
    permitted = user.id == current_user.id or authz.is_admin()
    authz.require(permitted)
    user.update(request_data())
    db.session.add(user)
    db.session.commit()
    return jsonify(user)
def view(id):
    """Return a single user's serialized record.

    The ``email`` field is exposed only to the user themselves; any other
    permitted viewer (i.e. an admin) receives the record without it.
    """
    user = obj_or_404(User.by_id(id))
    # Compare the loaded record's canonical id instead of the raw route
    # value, consistent with ``update``: the route parameter may arrive as
    # a string while user ids are presumably ints — TODO confirm the URL
    # converter. The original raw comparison could 403 the owner.
    authz.require(user.id == current_user.id or authz.is_admin())
    data = user.to_dict()
    if user.id != current_user.id:
        del data['email']
    return jsonify(data)
def index():
    """List every user (admin only), with email addresses removed."""
    authz.require(authz.is_admin())
    results = []
    for user in User.all():
        record = user.to_dict()
        # Strip the address even for admins; listing never exposes emails.
        del record['email']
        results.append(record)
    return jsonify({'results': results, 'total': len(results)})
def view(slug):
    """Return a source's metadata; include extra fields for writers.

    Readers get the basic representation; users with write access also
    see the source's user ids and configuration.
    """
    # Grant access to anyone who can read the source *or* to admins.
    # The previous ``and`` required both, which locked out every ordinary
    # reader and made source_read pointless — all sibling endpoints use
    # the ``... or authz.is_admin()`` pattern.
    authz.require(authz.source_read(slug) or authz.is_admin())
    source = obj_or_404(Source.by_slug(slug))
    etag_cache_keygen(source)
    data = source.to_dict()
    data['can_write'] = authz.source_write(slug)
    if data['can_write']:
        data['users'] = [u.id for u in source.users]
        data['config'] = source.config
    return jsonify(data)
def queue():
    """Queue a background run of a named crawler (admin only).

    Responds with a queued status on success, or a 400-style error body
    when no exposed crawler matches the submitted ``crawler_id``.
    """
    authz.require(authz.is_admin())
    data = request_data()
    crawler_id = data.get('crawler_id')
    matching = (c for c in get_exposed_crawlers() if c.get_id() == crawler_id)
    for cls in matching:
        incremental = bool(data.get('incremental', False))
        execute_crawler.delay(crawler_id, incremental=incremental)
        return jsonify({'status': 'queued'})
    return jsonify({'status': 'error', 'message': 'No such crawler'}, status=400)
def states():
    """Paginated listing of failed crawler states (admin only).

    Optional query-string filters: crawler_id, crawler_run, error_type.
    Results are ordered newest first.
    """
    authz.require(authz.is_admin())
    query = db.session.query(CrawlerState)
    query = query.filter(CrawlerState.status == CrawlerState.STATUS_FAIL)
    # Apply each optional filter in the same order as before.
    for field in ('crawler_id', 'crawler_run', 'error_type'):
        if field in request.args:
            column = getattr(CrawlerState, field)
            query = query.filter(column == request.args.get(field))
    query = query.order_by(CrawlerState.created_at.desc())
    return jsonify(Pager(query))
def states():
    """Paginated listing of failed crawler states (admin only).

    Optional query-string filters: crawler_id, crawler_run, error_type.
    Results are ordered newest first; the pager is serialized explicitly.
    """
    authz.require(authz.is_admin())
    query = db.session.query(CrawlerState)
    query = query.filter(CrawlerState.status == CrawlerState.STATUS_FAIL)
    # Apply each optional filter in the same order as before.
    for field in ('crawler_id', 'crawler_run', 'error_type'):
        if field in request.args:
            column = getattr(CrawlerState, field)
            query = query.filter(column == request.args.get(field))
    query = query.order_by(CrawlerState.created_at.desc())
    response = Pager(query).to_dict()
    return jsonify(response)
def status():
    """Report the current session: login state, admin flag, OAuth logins.

    Providers that are mere stubs are omitted from the login URL map.
    ``api_key`` and ``user`` are only populated for a logged-in session.
    """
    logins = {}
    for name, provider in PROVIDERS.items():
        if isinstance(provider, Stub):
            continue
        logins[name] = url_for('.login', provider=name)
    return jsonify({
        'logged_in': authz.logged_in(),
        'is_admin': authz.is_admin(),
        'api_key': current_user.api_key if authz.logged_in() else None,
        'user': current_user if authz.logged_in() else None,
        'permissions': {},
        'logins': logins,
        'logout': url_for('.logout')
    })
def create():
    """Create a new source (admin only) and return its detail view."""
    authz.require(authz.is_admin())
    source = Source.create(request_data(), current_user)
    db.session.commit()
    # Delegate serialization to the detail endpoint for the new record.
    return view(source.slug)
def index():
    """List every exposed crawler (admin only) with a total count."""
    authz.require(authz.is_admin())
    results = list(get_exposed_crawlers())
    return jsonify({'results': results, 'total': len(results)})
def ui_admin(**kwargs):
    """Serve the admin UI shell page (admin only).

    Route arguments are accepted but unused; the Angular template
    partials are injected into the layout.
    """
    authz.require(authz.is_admin())
    templates = angular_templates()
    return render_template("layout.html", templates=templates)
def is_accessible(self):
    # Access hook for this view — admins only. NOTE(review): presumably
    # Flask-Admin's BaseView.is_accessible contract; confirm the base class.
    return authz.is_admin()
def index():
    """Paginated listing of exposed crawlers (admin only), 20 per page."""
    authz.require(authz.is_admin())
    crawlers = list(get_exposed_crawlers())
    pager = Pager(crawlers, limit=20)
    return jsonify(pager)