def get_content_timeseries(user, org, content_item_id):
    """
    Query an individual content timeseries.
    """
    content_item = ContentItem.query\
        .filter_by(id=content_item_id)\
        .filter_by(org_id=org.id)\
        .first()
    if not content_item:
        raise NotFoundError(
            'A ContentItem with ID {} does not exist'
            .format(content_item_id))

    # select / exclude
    select, exclude = arg_list(
        'select', typ=str, exclusions=True, default=['*'])
    if '*' in select:
        # a wildcard select overrides any exclusions.
        exclude = []
        select = "*"

    query_kwargs = dict(
        unit=arg_str('unit', default='hour'),
        sparse=arg_bool('sparse', default=True),
        sig_digits=arg_int('sig_digits', default=2),
        select=select,
        exclude=exclude,
        rm_nulls=arg_bool('rm_nulls', default=False),
        time_since_start=arg_bool('time_since_start', default=False),
        transform=arg_str('transform', default=None),
        before=arg_date('before', default=None),
        after=arg_date('after', default=None)
    )
    ts_query = QueryContentMetricTimeseries(
        org, [content_item_id], **query_kwargs)
    return jsonify(list(ts_query.execute()))
def get_all_content_comparisons(user, org):
    """
    Refresh content comparisons.
    """
    refresh = arg_bool('refresh', default=False)
    cache_details = arg_bool('cache_details', default=False)

    if refresh:
        comparisons_cache.invalidate(org.id)
    cr = comparisons_cache.get(org.id)

    # a cache hit right after an invalidation means the purge failed.
    if refresh and cr.is_cached:
        raise InternalServerError(
            'Something went wrong with the cache invalidation process.')

    if cache_details:
        return jsonify({'cache': cr, 'comparisons': cr.value})
    return jsonify(cr.value)
def extract(user):
    """
    Extract metadata for a url, optionally forcing a fresh extraction
    and rendering the result as an html preview.

    Fix: the locals `type` and `format` shadowed the builtins of the
    same names; renamed them (runtime behavior is unchanged).
    """
    url = arg_str('url', default=None)
    extract_type = arg_str('type', default='article')
    force_refresh = arg_bool('force_refresh', default=False)
    output_format = arg_str('format', default='json')

    if not url:
        raise RequestError("A url is required.")

    if force_refresh:
        # NOTE(review): presumably debug mode bypasses the cache and
        # forces a fresh extraction — confirm against the cache class.
        extract_cache.debug = True

    cr = extract_cache.get(url, extract_type)
    if not cr:
        # drop the bad cache entry so the next attempt starts clean.
        extract_cache.invalidate(url, extract_type)
        raise InternalServerError('Something went wrong. Try again.')

    resp = {
        'cache': cr,
        'data': cr.value
    }
    if output_format == 'html':
        return render_template(
            'extract_preview.html',
            data=resp)
    return jsonify(resp)
def response_from_cache(cr):
    """
    Format a response from a cache object.
    """
    include_details = arg_bool('cache_details', default=False)
    if include_details:
        payload = {'cache': cr, 'comparisons': cr.value}
    else:
        payload = cr.value
    resp = jsonify(payload)
    resp.headers['Last-Modified'] = cr.last_modified
    return resp
def get_author(user, org, author_id):
    """
    Get an author by ID within the given org.

    Raises NotFoundError when no such author exists.
    Fix: removed the stray trailing quote that was embedded in the
    error message ('... does not exist."').
    """
    incl_content = arg_bool('incl_content', default=False)
    a = Author.query\
        .filter_by(id=author_id, org_id=org.id)\
        .first()
    if not a:
        raise NotFoundError(
            'Author with ID "{}" does not exist.'.format(author_id))
    return jsonify(a.to_dict(incl_content=incl_content))
def get_one_content_comparisons(user, org, type):
    """
    Get one content comparison.
    """
    # allow the urls to be pretty slugs :)
    type = type.replace('-', "_")
    if type not in CONTENT_METRIC_COMPARISONS:
        raise RequestError(
            "'{}' is an invalid content metric comparison. Choose from {}"
            .format(type, ", ".join(CONTENT_METRIC_COMPARISONS)))

    should_refresh = arg_bool('refresh', default=False)
    want_cache_details = arg_bool('cache_details', default=False)

    cache = comparison_types[type]
    if should_refresh:
        cache.invalidate(org.id)
    cr = cache.get(org.id)

    # a cache hit right after an invalidation means the purge failed.
    if should_refresh and cr.is_cached:
        raise InternalServerError(
            'Something went wrong with the comparison cache invalidation process.')

    if want_cache_details:
        return jsonify({'cache': cr, 'comparison': cr.value.get(type)})
    return jsonify(cr.value.get(type))
def get_author(user, org, author_id):
    """
    Get an author by numeric ID or by name (matched upper-cased)
    within the given org.

    Raises NotFoundError when no such author exists.
    Fix: removed the stray trailing quote that was embedded in the
    error message ('... does not exist."').
    """
    incl_content = arg_bool('incl_content', default=False)
    a = fetch_by_id_or_field(Author, 'name', author_id,
                             org_id=org.id, transform='upper')
    if not a:
        raise NotFoundError(
            'Author with ID/Name "{}" does not exist.'
            .format(author_id))
    return jsonify(a.to_dict(incl_content=incl_content))
def get_org_timeseries(user, org_id_slug):
    """
    Query the metric timeseries for an Org.
    """
    # fetch org
    org = fetch_by_id_or_field(Org, 'slug', org_id_slug)

    # if it still doesn't exist, raise an error.
    if not org:
        raise NotFoundError(
            'This Org does not exist.')

    # ensure the active user can edit this Org
    if user.id not in org.user_ids:
        raise ForbiddenError(
            'You are not allowed to access this Org')

    # todo: validate which metrics you can select.
    # select / exclude
    select, exclude = arg_list(
        'select', typ=str, exclusions=True, default=['all'])
    if 'all' in select:
        # selecting everything overrides any exclusions.
        exclude = []
        select = "all"

    query_kwargs = dict(
        unit=arg_str('unit', default='hour'),
        sparse=arg_bool('sparse', default=True),
        sig_digits=arg_int('sig_digits', default=2),
        select=select,
        exclude=exclude,
        group_by_id=arg_bool('group_by_id', default=True),
        rm_nulls=arg_bool('rm_nulls', default=False),
        time_since_start=arg_bool('time_since_start', default=False),
        transform=arg_str('transform', default=None),
        before=arg_date('before', default=None),
        after=arg_date('after', default=None)
    )
    ts_query = QueryOrgMetricTimeseries(org, [org.id], **query_kwargs)
    return jsonify(list(ts_query.execute()))
def get_author(user, org, author_id):
    """
    Get an author by ID within the given org.

    Raises NotFoundError when no such author exists.
    Fix: removed the stray trailing quote that was embedded in the
    error message ('... does not exist."').
    """
    incl_content = arg_bool('incl_content', default=False)
    a = Author.query\
        .filter_by(id=author_id, org_id=org.id)\
        .first()
    if not a:
        raise NotFoundError(
            'Author with ID "{}" does not exist.'
            .format(author_id))
    return jsonify(a.to_dict(incl_content=incl_content))
def list_authors(user, org):
    """
    Get all authors.
    """
    incl_content = arg_bool('incl_content', default=False)
    search_query = arg_str('q', default=None)

    author_query = Author.query\
        .filter_by(org_id=org.id)
    if search_query:
        author_query = author_query.search(
            search_query, vector=Author.search_vector, sort=True)

    return jsonify([
        author.to_dict(incl_content=incl_content)
        for author in author_query.all()
    ])
def get_author(user, org, author_id):
    """
    Get an author by numeric ID or by name (matched upper-cased)
    within the given org.

    Raises NotFoundError when no such author exists.
    Fix: removed the stray trailing quote that was embedded in the
    error message ('... does not exist."').
    """
    incl_content = arg_bool('incl_content', default=False)
    a = fetch_by_id_or_field(Author, 'name', author_id,
                             org_id=org.id, transform='upper')
    if not a:
        raise NotFoundError(
            'Author with ID/Name "{}" does not exist.'.format(author_id))
    return jsonify(a.to_dict(incl_content=incl_content))
def list_authors(user, org):
    """
    Get all authors.
    """
    incl_content = arg_bool('incl_content', default=False)
    search_term = arg_str('q', default=None)

    query = Author.query\
        .filter_by(org_id=org.id)
    if search_term:
        query = query.search(
            search_term, vector=Author.search_vector, sort=True)

    results = [a.to_dict(incl_content=incl_content) for a in query.all()]
    return jsonify(results)
def org_remove_user(user, org_id_slug, user_email):
    """
    Remove a user (by id or email) from an Org; admins only.
    With ?force=true, also delete the user after re-assigning their
    recipes to the org's super-user.

    Fixes:
      * the force branch deleted `user` (the requesting admin) instead
        of `existing_user` (the user being removed).
      * the recipe re-assignment SQL is now executed with bound
        parameters instead of string formatting.
    """
    if not user.admin:
        raise AuthError(
            'You must be an admin to remove a user from an Org.')

    # fetch org
    org = fetch_by_id_or_field(Org, 'slug', org_id_slug)

    # if it still doesn't exist, raise an error.
    if not org:
        raise NotFoundError('This Org does not exist.')

    # localize
    localize(org)

    # ensure the active user can edit this Org
    if user.id not in org.user_ids:
        raise ForbiddenError(
            "You are not allowed to access this Org.")

    # get this existing user by id / email
    existing_user = fetch_by_id_or_field(User, 'email', user_email)
    if not existing_user:
        raise RequestError(
            'User "{}" does not yet exist'
            .format(user_email))

    # ensure that user is not already a part of this Org.
    if existing_user.id not in org.user_ids:
        raise RequestError(
            'User "{}" is not a part of Org "{}"'
            .format(existing_user.email, org.name))

    # remove the user from the org
    org.users.remove(existing_user)

    # if we're force-deleting the user, do so
    # but make sure their recipes are re-assigned
    # to the super-user
    if arg_bool('force', False):
        db.session.execute(
            "UPDATE recipes set user_id=:new_id WHERE user_id=:old_id",
            {'new_id': org.super_user.id, 'old_id': existing_user.id})
        db.session.delete(existing_user)

    db.session.commit()
    return delete_response()
def get_comparison(*args, **kwargs):
    """
    Get a single comparison.
    """
    raw_level = kwargs.pop('level')
    raw_type = kwargs.pop('type')
    level = parse_comparison_level(raw_level)
    type = parse_comparison_type(raw_type, level)

    should_refresh = arg_bool('refresh', default=False)
    cache = comparison_types[level][type]

    if should_refresh:
        cache.invalidate(*args, **kwargs)
    cr = cache.get(*args, **kwargs)

    # a cache hit right after an invalidation means the purge failed.
    if should_refresh and cr.is_cached:
        raise InternalServerError(
            'Something went wrong with the cache invalidation process.')
    return cr
def exec_query(user):
    """
    Only the super user can access the sql api. This is primarily
    intended for internal recipes which may operate on machines
    without access to the database.

    Fixes:
      * `e.message` is Python-2-only; format the exception directly.
      * `q` is now initialized so it cannot be unbound for other
        request methods.
      * the non-stream single-result check used `data[0]['success']`,
        which raises KeyError when the single item is an ordinary
        result row; use .get() instead.
    """
    if not user.super_user:
        raise ForbiddenError(
            "Only the super user can access the SQL API.")

    q = None
    if request.method == "POST":
        q = request_data().get('query', None)
    if request.method == "GET":
        q = arg_str('query', default=None)
    if not q:
        raise RequestError('A query - "q" is required.')

    stream = arg_bool('stream', default=True)

    try:
        results = db.session.execute(q)
    except Exception as e:
        raise RequestError(
            "There was an error executing this query: "
            "{}".format(e))

    def generate():
        try:
            for row in ResultIter(results):
                if stream:
                    yield obj_to_json(row) + "\n"
                else:
                    yield row
        except ResourceClosedError:
            # statements with no result set (e.g. UPDATE) close the
            # cursor; report success instead of erroring.
            resp = {'success': True}
            if stream:
                yield obj_to_json(resp) + "\n"
            else:
                yield resp

    if stream:
        return Response(stream_with_context(generate()))

    data = list(generate())
    if len(data) == 1:
        # unwrap a lone {'success': True} marker.
        if data[0].get('success'):
            data = data[0]
    return jsonify(data)
def org_remove_user(user, org_id_slug, user_email):
    """
    Remove a user (by id or email) from an Org; admins only.
    With ?force=true, also delete the user after re-assigning their
    recipes to the org's super-user.

    Fixes:
      * the force branch deleted `user` (the requesting admin) instead
        of `existing_user` (the user being removed).
      * the recipe re-assignment SQL is now executed with bound
        parameters instead of string formatting.
    """
    if not user.admin:
        raise AuthError('You must be an admin to remove a user from an Org.')

    # fetch org
    org = fetch_by_id_or_field(Org, 'slug', org_id_slug)

    # if it still doesn't exist, raise an error.
    if not org:
        raise NotFoundError('This Org does not exist.')

    # localize
    localize(org)

    # ensure the active user can edit this Org
    if user.id not in org.user_ids:
        raise ForbiddenError("You are not allowed to access this Org.")

    # get this existing user by id / email
    existing_user = fetch_by_id_or_field(User, 'email', user_email)
    if not existing_user:
        raise RequestError('User "{}" does not yet exist'.format(user_email))

    # ensure that user is not already a part of this Org.
    if existing_user.id not in org.user_ids:
        raise RequestError('User "{}" is not a part of Org "{}"'.format(
            existing_user.email, org.name))

    # remove the user from the org
    org.users.remove(existing_user)

    # if we're force-deleting the user, do so
    # but make sure their recipes are re-assigned
    # to the super-user
    if arg_bool('force', False):
        db.session.execute(
            "UPDATE recipes set user_id=:new_id WHERE user_id=:old_id",
            {'new_id': org.super_user.id, 'old_id': existing_user.id})
        db.session.delete(existing_user)

    db.session.commit()
    return delete_response()
def exec_query(user):
    """
    Only the super user can access the sql api. This is primarily
    intended for internal recipes which may operate on machines
    without access to the database.

    Fixes:
      * `e.message` is Python-2-only; format the exception directly.
      * `q` is now initialized so it cannot be unbound for other
        request methods.
      * the non-stream single-result check used `data[0]['success']`,
        which raises KeyError when the single item is an ordinary
        result row; use .get() instead.
    """
    if not user.super_user:
        raise ForbiddenError("Only the super user can access the SQL API.")

    q = None
    if request.method == "POST":
        q = request_data().get('query', None)
    if request.method == "GET":
        q = arg_str('query', default=None)
    if not q:
        raise RequestError('A query - "q" is required.')

    stream = arg_bool('stream', default=True)

    try:
        results = db.session.execute(q)
    except Exception as e:
        raise RequestError("There was an error executing this query: "
                           "{}".format(e))

    def generate():
        try:
            for row in ResultIter(results):
                if stream:
                    yield obj_to_json(row) + "\n"
                else:
                    yield row
        except ResourceClosedError:
            # statements with no result set (e.g. UPDATE) close the
            # cursor; report success instead of erroring.
            resp = {'success': True}
            if stream:
                yield obj_to_json(resp) + "\n"
            else:
                yield resp

    if stream:
        return Response(stream_with_context(generate()))

    data = list(generate())
    if len(data) == 1:
        # unwrap a lone {'success': True} marker.
        if data[0].get('success'):
            data = data[0]
    return jsonify(data)
def update_me(user):
    """
    Update yourself.
    """
    # get the form.
    form = request_data()

    new_email = form.get('email')
    if new_email:
        # validate the email address:
        if not mail.validate(new_email):
            raise RequestError(
                "'{}' is not a valid email address."
                .format(new_email))
        user.email = new_email

    current_pw = form.get('old_password')
    proposed_pw = form.get('new_password')
    if current_pw and proposed_pw:
        if not user.check_password(current_pw):
            raise ForbiddenError('Invalid password.')
        user.set_password(proposed_pw)

    display_name = form.get('name')
    if display_name:
        user.name = display_name

    # check if we should refresh the apikey
    if arg_bool('refresh_apikey', False):
        user.set_apikey()

    db.session.add(user)
    db.session.commit()
    return jsonify(user.to_dict(incl_apikey=True))
def update_me(user):
    """
    Update yourself.
    """
    # get the form.
    submitted = request_data()
    email = submitted.get('email')
    old_password = submitted.get('old_password')
    new_password = submitted.get('new_password')
    name = submitted.get('name')

    # edit user: email first, with validation.
    if email:
        if not mail.validate(email):
            raise RequestError(
                "'{}' is not a valid email address.".format(email))
        user.email = email

    # password change requires both old and new values.
    if old_password and new_password:
        if not user.check_password(old_password):
            raise ForbiddenError('Invalid password.')
        user.set_password(new_password)

    if name:
        user.name = name

    # check if we should refresh the apikey
    if arg_bool('refresh_apikey', False):
        user.set_apikey()

    db.session.add(user)
    db.session.commit()
    return jsonify(user.to_dict(incl_apikey=True))