def get(self, groupname, name):
    """
    Review the articles for a specific feed on this key.
    """
    key = auth()
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        abort(404)
    per_page = 10
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    # Build the filter conditions once; the optional content flag narrows
    # the result set to articles with (True) or without (False) content.
    conditions = [Article.key == key, Article.feed == feed]
    if args.content == True:
        conditions.append(Article.content != None)
    elif args.content == False:
        conditions.append(Article.content == None)
    page = Article.query.filter(and_(*conditions))\
        .order_by(desc(Article.created)).paginate(args.page, per_page)
    return [article.jsonify() for article in page.items]
def get(self, groupname, name, terms):
    """
    Search for articles within a feed.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    args = parser.parse_args()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    # Locate the named feed within the group or 404.
    matches = [feed for feed in group.feeds if feed.name == name]
    if not matches:
        abort(404)
    feed = matches[0]
    pattern = "%" + terms + "%"
    page = Article.query.filter(
        and_(Article.feed == feed, Article.title.like(pattern)))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return [article.jsonify() for article in page.items]
def get(self):
    """
    Review all articles associated with this key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    # Optionally narrow by content availability.
    conditions = [Article.key == key]
    if args.content == True:
        conditions.append(Article.content != None)
    elif args.content == False:
        conditions.append(Article.content == None)
    query = Article.query.filter(and_(*conditions))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    # Attach links to help consuming applications.
    return make_response(request.url, query)
def get(self):
    """
    Paginate an array of feed groups associated with the requesting key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    groups = FeedGroup.query.filter(FeedGroup.key == key)\
        .order_by(desc(FeedGroup.created))\
        .paginate(args.page, args.per_page)
    return make_response(request.url, groups)
def get(self, groupname, name, terms):
    """
    Search for articles within a feed.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    args = parser.parse_args()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    # Locate the named feed inside the group or 404.
    feed = None
    for candidate in group.feeds:
        if candidate.name == name:
            feed = candidate
            break
    if not feed:
        abort(404)
    articles = Article.query.filter(
        and_(Article.feed == feed, Article.title.like("%" + terms + "%")))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return [a.jsonify() for a in articles.items]
def get(self, groupname, name):
    """
    Review the articles for a specific feed on this key.
    """
    key = auth()
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        abort(404)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, default=1)
    parser.add_argument("per_page", type=int, default=10)
    parser.add_argument("content", type=bool, default=None)
    args = parser.parse_args()
    # The optional content flag narrows to articles with or without content.
    conditions = [Article.key == key, Article.feed == feed]
    if args.content == True:
        conditions.append(Article.content != None)
    elif args.content == False:
        conditions.append(Article.content == None)
    query = Article.query.filter(and_(*conditions))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, query)
def get(self, groupname, terms):
    """
    Return articles on feeds in this group with our search terms in the title.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    args = parser.parse_args()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    pattern = "%" + terms + "%"
    results = Article.query.filter(
        and_(Article.feed.has(group=group), Article.title.like(pattern)))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, results)
def post(self, groupname):
    """
    Rename a feed group and/or toggle its active status.
    """
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, help="Rename a feed group")
    parser.add_argument("active", type=bool, default=None)
    args = parser.parse_args()
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    if args.name:
        # Refuse to rename onto a name already taken on this key.
        if FeedGroup.query.filter(
                and_(FeedGroup.key == key, FeedGroup.name == args.name)).first():
            # Fixed message: this endpoint manages feed *groups*, not feeds.
            return {"message": "A feed group already exists with this name."}, 304
        fg.name = args.name
    # active may legitimately be False, so test against None, not truthiness.
    if args.active is not None:
        fg.active = args.active
    db.session.add(fg)
    db.session.commit()
    return fg.jsonify()
def get(self, groupname, name):
    """
    Review the articles for a specific feed on this key.
    """
    key = auth()
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        abort(404)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    # Choose the base filter according to the optional content flag.
    if args.content == True:
        criteria = and_(Article.key == key, Article.content != None,
                        Article.feed == feed)
    elif args.content == False:
        criteria = and_(Article.key == key, Article.content == None,
                        Article.feed == feed)
    else:
        criteria = and_(Article.key == key, Article.feed == feed)
    paginated = Article.query.filter(criteria)\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, paginated)
def put(self):
    """
    Create a new feed group, providing the name isn't already in use.
    """
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, help="", required=True)
    parser.add_argument("active", type=bool, default=True,
                        help="Feed is active", required=False)
    args = parser.parse_args()
    # Reject duplicate group names on this key.
    existing = [group for group in key.feedgroups if group.name == args.name]
    if existing:
        return {"message": "Feed group %s already exists." % args.name}, 304
    group = FeedGroup(name=args.name, active=args.active)
    key.feedgroups.append(group)
    db.session.add(group)
    db.session.add(key)
    db.session.commit()
    return group.jsonify(), 201
def post(self, groupname):
    """
    Rename a feed group or toggle its active status.
    """
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, help="Rename a feed group")
    parser.add_argument("active", type=bool, default=None)
    args = parser.parse_args()
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    if args.name:
        taken = FeedGroup.query.filter(
            and_(FeedGroup.key == key, FeedGroup.name == args.name)).first()
        if taken:
            # Fixed message: this endpoint manages feed *groups*, not feeds.
            return {"message": "A feed group already exists with this name."}, 304
        fg.name = args.name
    # active may legitimately be False; only skip when it wasn't supplied.
    if args.active is not None:
        fg.active = args.active
    db.session.add(fg)
    db.session.commit()
    return fg.jsonify()
def put(self, groupname):
    """
    Create a new feed providing the name and url are unique.
    Feeds must be associated with a group.
    """
    # Writer credentials required; reader keys may not create feeds.
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, help="", required=True)
    parser.add_argument("url", type=str, help="", required=True)
    parser.add_argument("schedule", type=str, help="", required=True)
    parser.add_argument("active", type=bool, default=True, help="Feed is active", required=False)
    args = parser.parse_args()
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        return {"message": "Unknown Feed Group %s" % groupname}, 304
    # Verify the schedule
    try:
        parse_timings(args.schedule)
    except CronError, err:
        return {"message": err.message}, 500
    # NOTE(review): as visible here the method only validates input and then
    # falls through, returning None on success — no Feed is ever constructed
    # or committed. Confirm whether the creation logic was lost (compare the
    # feed-group put handler, which creates and commits its object).
def post(self, groupname, name):
    """
    Enqueue a stop message for the named feed and echo the feed.
    """
    key = auth(forbid_reader_keys=True)
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        restful.abort(404)
    # Ask the fetching process to halt this feed.
    app.inbox.put([0, "stop", [key, feed.name]])
    return feed.jsonify()
def get(self, groupname):
    """
    Return the total number of articles across all feeds in the group.
    """
    key = auth()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    total = 0
    for feed in group.feeds:
        total += len(feed.articles)
    return total
def post(self, groupname, name):
    """
    Enqueue a stop message for the named feed and echo the feed.

    Returns 404 if no such feed exists on this key.
    """
    # Stopping a feed mutates system state: forbid reader keys, consistent
    # with the other stop handler for this endpoint in this file.
    key = auth(forbid_reader_keys=True)
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if feed:
        app.inbox.put([0, "stop", [key, feed.name]])
        return feed.jsonify()
    restful.abort(404)
def get(self, groupname):
    """
    Return the total article count across every feed in the group.
    """
    key = auth()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    counts = [len(feed.articles) for feed in group.feeds]
    return sum(counts)
def get(self, groupname, name):
    """
    Review a feed.
    """
    key = auth()
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        restful.abort(404)
    return feed.jsonify()
def get(self, groupname):
    """
    Review a specific feed group.
    """
    key = auth()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if group:
        return group.jsonify()
    restful.abort(404)
def post(self, groupname):
    """
    Enqueue a stop message for every feed in the group.
    """
    key = auth(forbid_reader_keys=True)
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    for feed in group.feeds:
        app.inbox.put([0, "stop", [key, feed.name]])
    return {}
def get(self, groupname):
    """
    Retrieve articles by feedgroup.
    """
    key = auth()
    # Summon the group or 404.
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    if args.content == True:
        query = Article.query.filter(
            and_(Article.feed.has(group=fg), Article.content != None))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        # Previously this response was built and then discarded (its return
        # statement was commented out), so ?content=true fell through and
        # returned unfiltered articles. Return the filtered page instead.
        return make_response(request.url, query)
    if args.content == False:
        query = Article.query.filter(
            and_(Article.feed.has(group=fg), Article.content == None))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        return make_response(request.url, query)
    query = Article.query.filter(Article.feed.has(group=fg))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, query)
def post(self, groupname):
    """
    Ask the fetching process to stop every feed in this group.
    """
    key = auth(forbid_reader_keys=True)
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    for member in fg.feeds:
        app.inbox.put([0, "stop", [key, member.name]])
    return {}
def get(self, uid):
    """
    Read an article.
    """
    key = auth()
    article = Article.query.filter(
        and_(Article.key == key, Article.uid == uid)).first()
    if not article:
        restful.abort(404)
    return article.jsonify(summary=True, content=True)
def get(self, groupname):
    """
    Review a specific feed group.
    """
    key = auth()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    return group.jsonify()
def post(self, groupname):
    """
    Start all feeds within a group.
    """
    # NOTE(review): unlike the stop/delete handlers this permits reader
    # keys (plain auth()) — confirm that is intended.
    key = auth()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    for feed in group.feeds:
        app.inbox.put([0, "start", [key, feed.name]])
    return {}
def delete(self, uid):
    """
    Delete an article.
    """
    key = auth(forbid_reader_keys=True)
    article = Article.query.filter(
        and_(Article.key == key, Article.uid == uid)).first()
    if not article:
        restful.abort(404)
    db.session.delete(article)
    db.session.commit()
    return {}
def put(self):
    """
    Fetch an article without an associated feed.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("url",type=str, help="", required=True)
    args = parser.parse_args()
    try:
        article = fetch_feedless_article(key, args.url)
    except Exception, e:
        # Surface the failure reason to the caller.
        # NOTE(review): broad Exception catch, and the HTTP status is still
        # 200 for this error body — confirm whether a 4xx/5xx was intended.
        return {"Error": e.message}
    # NOTE(review): on success nothing is returned (response body is null)
    # and `article` is unused — confirm whether article.jsonify() should be
    # returned here.
def post(self, groupname):
    """
    Enqueue a start message for every feed in this group.
    """
    # NOTE(review): this mutating endpoint permits reader keys (plain
    # auth()), unlike the stop handlers — confirm that is intended.
    key = auth()
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    for member in fg.feeds:
        app.inbox.put([0, "start", [key, member.name]])
    return {}
def get(self, terms):
    """
    The /v1/articles/search/<terms> endpoint.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    pattern = "%" + terms + "%"
    if args.content == True:
        query = Article.query.filter(
            and_(Article.key == key, Article.content != None,
                 Article.title.like(pattern)))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        response = make_response(request.url, query)
        # Prune documents whose content turns out to be unavailable.
        # The old code called list.remove() while iterating the same list,
        # which skips the element after each removal; rebuild the list.
        response["data"] = [doc for doc in response["data"]
                            if doc["content_available"]]
        return response
    elif args.content == False:
        query = Article.query.filter(
            and_(Article.key == key, Article.content == None,
                 Article.title.like(pattern)))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        return make_response(request.url, query)
    query = Article.query.filter(
        and_(Article.key == key, Article.title.like(pattern)))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, query)
def get(self, terms):
    """
    The /v1/articles/search/<terms> endpoint.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    if args.content == True:
        query = Article.query.filter(
            and_(Article.key == key,
                 Article.content != None,
                 Article.title.like("%" + terms + "%")))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        response = make_response(request.url, query)
        # Drop documents without available content. Fixed: removing items
        # from a list while iterating it skips the element following each
        # removal; filter into a fresh list instead.
        response['data'] = [doc for doc in response['data']
                            if doc['content_available']]
        return response
    elif args.content == False:
        query = Article.query.filter(
            and_(Article.key == key,
                 Article.content == None,
                 Article.title.like("%" + terms + "%")))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        return make_response(request.url, query)
    query = Article.query.filter(
        and_(Article.key == key, Article.title.like("%" + terms + "%")))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, query)
def get(self):
    """
    Paginate an array of feed groups associated with the requesting key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, default=1)
    parser.add_argument("per_page", type=int, default=10)
    parser.add_argument("content", type=bool, default=None)
    args = parser.parse_args()
    page = FeedGroup.query.filter(FeedGroup.key == key)\
        .order_by(desc(FeedGroup.created))\
        .paginate(args.page, args.per_page)
    return make_response(request.url, page)
def get(self):
    """
    Paginate an array of feed groups associated with the requesting key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    page = FeedGroup.query.filter(FeedGroup.key == key)\
        .order_by(desc(FeedGroup.created)).paginate(args.page, args.per_page)
    return [group.jsonify() for group in page.items]
def post(self, groupname, name): """ Modify an existing feed. """ key = auth() parser = restful.reqparse.RequestParser() parser.add_argument("name", type=str, help="") parser.add_argument("group", type=str, help="") parser.add_argument("url", type=str, help="") parser.add_argument("schedule", type=str, help="") parser.add_argument("active", type=bool, default=None, help="Feed is active") args = parser.parse_args() feed = Feed.query.filter(and_(Feed.key == key, Feed.name == name)).first() if not feed: restful.abort(404) if args.name: if Feed.query.filter(and_(Feed.key == key, Feed.name == args.name)).first(): return { "message": "A feed already exists with this name." }, 304 feed.name = args.name if args.group: pass if args.active != None: feed.active = args.active if args.url: feed.url = args.url if args.schedule: try: parse_timings(args.schedule) except CronError, err: return {"message": err.message}, 500 feed.schedule = args.schedule
def delete(self, groupname, name):
    """
    Halt and delete a feed. Default to deleting its articles.
    """
    # Deleting is a mutating operation: forbid reader keys, consistent
    # with the other delete handlers in this file.
    key = auth(forbid_reader_keys=True)
    feed = Feed.query.filter(and_(Feed.key == key, Feed.name == name)).first()
    if not feed:
        restful.abort(404)
    # Stop the fetcher for this feed before destroying its rows.
    app.inbox.put([0, "stop", [key, feed.name]])
    app.log('%s: %s: Deleting feed "%s".' % (feed.key.name, feed.group.name, feed.name))
    for article in feed.articles:
        db.session.delete(article)
    db.session.delete(feed)
    db.session.commit()
    return {}
def delete(self, groupname):
    """
    Delete a feed group, all of its feeds and their articles.
    """
    key = auth(forbid_reader_keys=True)
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    deleted = 0
    for feed in group.feeds:
        for article in feed.articles:
            deleted += 1
            db.session.delete(article)
        db.session.delete(feed)
    db.session.delete(group)
    db.session.commit()
    app.log('%s: Deleted feed group "%s". (%s articles)'
            % (key.name, group.name, "{:,}".format(deleted)))
    return {}
def delete(self, groupname, name):
    """
    Halt and delete a feed. Default to deleting its articles.
    """
    key = auth(forbid_reader_keys=True)
    feed = Feed.query.filter(and_(Feed.key == key, Feed.name == name)).first()
    if not feed:
        restful.abort(404)
    # Halt the fetcher before removing rows.
    app.inbox.put([0, "stop", [key, feed.name]])
    app.log('%s: %s: Deleting feed "%s".' % (feed.key.name, feed.group.name, feed.name))
    for article in feed.articles:
        db.session.delete(article)
    db.session.delete(feed)
    db.session.commit()
    return {}
def get(self, groupname, terms):
    """
    Return articles on feeds in this group with our search terms in the title.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, default=1)
    parser.add_argument("per_page", type=int, default=10)
    args = parser.parse_args()
    group = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not group:
        restful.abort(404)
    matches = Article.query.filter(
        and_(Article.feed.has(group=group),
             Article.title.like("%" + terms + "%")))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, matches)
def delete(self, groupname):
    """
    Delete a feed group along with its feeds and their articles.
    """
    key = auth(forbid_reader_keys=True)
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    article_count = 0
    for feed in fg.feeds:
        for article in feed.articles:
            article_count += 1
            db.session.delete(article)
        db.session.delete(feed)
    db.session.delete(fg)
    db.session.commit()
    formatted = "{:,}".format(article_count)
    app.log('%s: Deleted feed group "%s". (%s articles)' % (key.name, fg.name, formatted))
    return {}
def get(self, groupname):
    """
    Retrieve articles by feedgroup.
    """
    key = auth()
    # Summon the group or 404.
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, default=1)
    parser.add_argument("per_page", type=int, default=10)
    parser.add_argument("content", type=bool, default=None)
    args = parser.parse_args()
    if args.content == True:
        query = Article.query.filter(
            and_(Article.feed.has(group=fg), Article.content != None))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        # Fixed: the filtered response used to be built and then discarded
        # (its return was commented out), so ?content=true fell through to
        # the unfiltered query below. Return the filtered page.
        return make_response(request.url, query)
    if args.content == False:
        query = Article.query.filter(
            and_(Article.feed.has(group=fg), Article.content == None))\
            .order_by(desc(Article.created)).paginate(args.page, args.per_page)
        return make_response(request.url, query)
    query = Article.query.filter(Article.feed.has(group=fg))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return make_response(request.url, query)
def get(self, groupname):
    """
    Retrieve articles by feedgroup.
    """
    key = auth()
    # Summon the group or 404.
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    if args.content == True:
        docs = [a.jsonify() for a in
                Article.query.filter(
                    and_(Article.feed.has(group=fg), Article.content != None))
                .order_by(desc(Article.created))
                .paginate(args.page, args.per_page).items]
        # Prune items whose content turns out to be unavailable. The old
        # code called list.remove() while iterating the same list, which
        # skips the element after each removal; filter instead.
        return [doc for doc in docs if doc['content_available']]
    if args.content == False:
        return [a.jsonify() for a in
                Article.query.filter(
                    and_(Article.feed.has(group=fg), Article.content == None))
                .order_by(desc(Article.created))
                .paginate(args.page, args.per_page).items]
    return [a.jsonify() for a in
            Article.query.filter(Article.feed.has(group=fg))
            .order_by(desc(Article.created))
            .paginate(args.page, args.per_page).items]
def get(self):
    """
    Review all articles associated with this key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    # The optional content flag narrows to articles with or without content.
    conditions = [Article.key == key]
    if args.content == True:
        conditions.append(Article.content != None)
    elif args.content == False:
        conditions.append(Article.content == None)
    page = Article.query.filter(and_(*conditions))\
        .order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return [article.jsonify() for article in page.items]
def put(self):
    """
    Create a new feed group, providing the name isn't already in use.
    """
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, required=True)
    parser.add_argument("active", type=bool, default=True, help="Feed is active", required=False)
    args = parser.parse_args()
    # A key may not hold two groups with the same name.
    for existing in key.feedgroups:
        if existing.name == args.name:
            return {"message": "Feed group %s already exists." % args.name}, 304
    group = FeedGroup(name=args.name, active=args.active)
    key.feedgroups.append(group)
    db.session.add(group)
    db.session.add(key)
    db.session.commit()
    return group.jsonify(), 201
def post(self, groupname, name): """ Modify an existing feed. """ key = auth() parser = restful.reqparse.RequestParser() parser.add_argument("name",type=str, help="") parser.add_argument("group",type=str, help="") parser.add_argument("url",type=str, help="") parser.add_argument("schedule",type=str, help="") parser.add_argument("active",type=bool, default=None, help="Feed is active") args = parser.parse_args() feed = Feed.query.filter(and_(Feed.key == key, Feed.name == name)).first() if not feed: restful.abort(404) if args.name: if Feed.query.filter(and_(Feed.key == key, Feed.name == args.name)).first(): return {"message":"A feed already exists with this name."}, 304 feed.name = args.name if args.group: pass if args.active != None: feed.active = args.active if args.url: feed.url = args.url if args.schedule: try: parse_timings(args.schedule) except CronError, err: return {"message": err.message}, 500 feed.schedule = args.schedule
def put(self, groupname):
    """
    Create a new feed providing the name and url are unique.
    Feeds must be associated with a group.
    """
    # Writer credentials required; reader keys may not create feeds.
    key = auth(forbid_reader_keys=True)
    parser = restful.reqparse.RequestParser()
    parser.add_argument("name", type=str, required=True)
    parser.add_argument("url", type=str, required=True)
    parser.add_argument("schedule", type=str, required=True)
    parser.add_argument("active", type=bool, default=True, help="Feed is active", required=False)
    args = parser.parse_args()
    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        return {"message":"Unknown Feed Group %s" % groupname}, 304
    # Verify the schedule
    try:
        parse_timings(args.schedule)
    except CronError, err:
        return {"message": err.message}, 500
    # NOTE(review): as visible here the method only validates input and then
    # falls through, returning None on success — no Feed is constructed or
    # committed. Confirm whether the creation logic was lost (compare the
    # feed-group put handler, which creates and commits its object).
def get(self):
    """
    Review all feeds associated with this key.
    """
    requesting_key = auth()
    return [f.jsonify() for f in requesting_key.feeds]
def get(self):
    """
    Return the amount of articles belonging to an API key.
    """
    requesting_key = auth()
    return len(requesting_key.articles)
def get(self, terms):
    """
    Search this key's articles by title.

    Returns a page of JSONified articles whose titles contain *terms*,
    optionally narrowed by content availability. (The old docstring,
    copied from the count endpoint, described the wrong behavior.)
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()
    pattern = "%" + terms + "%"
    if args.content == True:
        docs = [a.jsonify() for a in
                Article.query.filter(
                    and_(Article.key == key,
                         Article.content != None,
                         Article.title.like(pattern)))
                .order_by(desc(Article.created))
                .paginate(args.page, args.per_page).items]
        # Prune documents without content. The old code removed elements
        # from the list while iterating it, which skips the element after
        # each removal; rebuild the list instead.
        return [doc for doc in docs if doc['content_available']]
    elif args.content == False:
        return [a.jsonify() for a in
                Article.query.filter(
                    and_(Article.key == key,
                         Article.content == None,
                         Article.title.like(pattern)))
                .order_by(desc(Article.created))
                .paginate(args.page, args.per_page).items]
    # The original had a second, unreachable return after this one; dropped.
    return [a.jsonify() for a in
            Article.query.filter(
                and_(Article.key == key, Article.title.like(pattern)))
            .order_by(desc(Article.created))
            .paginate(args.page, args.per_page).items]