def populate_db():
    db_create()
    ucontr = UserController()
    ccontr = CategoryController()
    fcontr = FeedController()
    acontr = ArticleController()
    ccontr = CategoryController()
    user1, user2 = [ucontr.create(login=name,
                                  email="*****@*****.**" % name,
                                  password=name)
                    for name in ["user1", "user2"]]
    article_total = 0
    for user in (user1, user2):
        for i in range(3):
            cat_id = None
            if i:
                cat_id = ccontr.create(user_id=user.id,
                                       name="category%d" % i).id
            feed = fcontr.create(link="feed%d" % i, user_id=user.id,
                                 category_id=cat_id,
                                 title="%s feed%d" % (user.login, i))
            for j in range(3):
                entry = "%s %s article%d" % (user.login, feed.title, j)
                article_total += 1
                acontr.create(entry_id=entry,
                              link='http://test.te/%d' % article_total,
                              feed_id=feed.id, user_id=user.id,
                              category_id=cat_id, title=entry,
                              content="content %d" % article_total)

def reset_errors(feed_id):
    feed_contr = FeedController(g.user.id)
    feed = feed_contr.get(id=feed_id)
    feed_contr.update({'id': feed_id}, {'error_count': 0, 'last_error': ''})
    flash(gettext('Feed %(feed_title)r successfully updated.',
                  feed_title=feed.title), 'success')
    return redirect(request.referrer or url_for('home'))

def popular():
    """
    Return the most popular feeds for the last nb_days days.
    """
    # try to get the 'recent' popular websites, created after
    # 'not_created_before'
    # ie: not_added_before = date_last_added_feed - nb_days
    try:
        nb_days = int(request.args.get("nb_days", 365))
    except ValueError:
        nb_days = 10000
    last_added_feed = (FeedController().read().order_by(
        desc("created_date")).limit(1).all())
    if last_added_feed:
        date_last_added_feed = last_added_feed[0].created_date
    else:
        date_last_added_feed = datetime.now()
    not_added_before = date_last_added_feed - timedelta(days=nb_days)
    filters = {}
    filters["created_date__gt"] = not_added_before
    filters["private"] = False
    filters["error_count__lt"] = conf.DEFAULT_MAX_ERROR
    feeds = FeedController().count_by_link(**filters)
    sorted_feeds = sorted(list(feeds.items()), key=operator.itemgetter(1),
                          reverse=True)
    return render_template("popular.html", popular=sorted_feeds)

def delete(feed_id=None):
    feed_contr = FeedController(g.user.id)
    feed = feed_contr.get(id=feed_id)
    feed_contr.delete(feed_id)
    flash(gettext("Feed %(feed_title)s successfully deleted.",
                  feed_title=feed.title), 'success')
    return redirect(url_for('home'))

def execute(self):
    delta = datetime.now() - timedelta(minutes=LATE_AFTER + FETCH_RATE + 1)
    total = FeedController().read().count()
    print("feeds.value %d"
          % len(FeedController().list_late(delta, limit=total)))
    print("feeds_total.value %d" % FeedController().read().count())

def bookmarklet():
    feed_contr = FeedController(current_user.id)
    url = (request.args if request.method == 'GET' else request.form)\
            .get('url', None)
    if not url:
        flash(gettext("Couldn't add feed: url missing."), "error")
        raise BadRequest("url is missing")
    feed_exists = list(feed_contr.read(__or__={'link': url,
                                               'site_link': url}))
    if feed_exists:
        flash(gettext("Couldn't add feed: feed already exists."), "warning")
        return redirect(url_for('feed.form', feed_id=feed_exists[0].id))
    try:
        feed = construct_feed_from(url)
    except requests.exceptions.ConnectionError:
        flash(gettext("Impossible to connect to the address: {}.".format(url)),
              "danger")
        return redirect(url_for('home'))
    except Exception:
        logger.exception('something bad happened when fetching %r', url)
        return redirect(url_for('home'))
    if not feed.get('link'):
        feed['enabled'] = False
        flash(gettext("Couldn't find a feed url, you'll need to find an Atom"
                      " or RSS link manually and reactivate this feed"),
              'warning')
    feed = feed_contr.create(**feed)
    flash(gettext('Feed was successfully created.'), 'success')
    if feed.enabled and conf.CRAWLING_METHOD == "default":
        misc_utils.fetch(current_user.id, feed.id)
        flash(gettext("Downloading articles for the new feed..."), 'info')
    return redirect(url_for('feed.form', feed_id=feed.id))

def bookmarklet():
    feed_contr = FeedController(g.user.id)
    url = (request.args if request.method == 'GET' else request.form)\
            .get('url', None)
    if not url:
        flash(gettext("Couldn't add feed: url missing."), "error")
        raise BadRequest("url is missing")
    feed_exists = list(feed_contr.read(__or__={'link': url,
                                               'site_link': url}))
    if feed_exists:
        flash(gettext("Couldn't add feed: feed already exists."), "warning")
        return redirect(url_for('feed.form', feed_id=feed_exists[0].id))
    try:
        feed = construct_feed_from(url)
    except requests.exceptions.ConnectionError:
        flash(gettext("Impossible to connect to the address: {}.".format(url)),
              "danger")
        return redirect(url_for('home'))
    except Exception:
        logger.exception('something bad happened when fetching %r', url)
        return redirect(url_for('home'))
    if not feed.get('link'):
        feed['enabled'] = False
        flash(gettext("Couldn't find a feed url, you'll need to find an Atom"
                      " or RSS link manually and reactivate this feed"),
              'warning')
    feed = feed_contr.create(**feed)
    flash(gettext('Feed was successfully created.'), 'success')
    if feed.enabled and conf.CRAWLING_METHOD == "classic":
        utils.fetch(g.user.id, feed.id)
        flash(gettext("Downloading articles for the new feed..."), 'info')
    return redirect(url_for('feed.form', feed_id=feed.id))

def popular():
    """
    Return the most popular feeds for the last nb_days days.
    """
    # try to get the 'recent' popular websites, created after
    # 'not_created_before'
    # ie: not_added_before = date_last_added_feed - nb_days
    try:
        nb_days = int(request.args.get('nb_days', 365))
    except ValueError:
        nb_days = 10000
    last_added_feed = FeedController().read().\
            order_by(desc('created_date')).limit(1).all()
    if last_added_feed:
        date_last_added_feed = last_added_feed[0].created_date
    else:
        date_last_added_feed = datetime.now()
    not_added_before = date_last_added_feed - timedelta(days=nb_days)
    filters = {}
    filters['created_date__gt'] = not_added_before
    filters['private'] = False
    filters['error_count__lt'] = conf.DEFAULT_MAX_ERROR
    feeds = FeedController().count_by_link(**filters)
    sorted_feeds = sorted(list(feeds.items()), key=operator.itemgetter(1),
                          reverse=True)
    return render_template('popular.html', popular=sorted_feeds)

def delete(feed_id=None):
    feed_contr = FeedController(current_user.id)
    feed = feed_contr.get(id=feed_id)
    feed_contr.delete(feed_id)
    flash(gettext("Feed %(feed_title)s successfully deleted.",
                  feed_title=feed.title), 'success')
    return redirect(url_for('home'))

def reset_errors(feed_id):
    feed_contr = FeedController(current_user.id)
    feed = feed_contr.get(id=feed_id)
    feed_contr.update({'id': feed_id}, {'error_count': 0, 'last_error': ''})
    flash(gettext('Feed %(feed_title)r successfully updated.',
                  feed_title=feed.title), 'success')
    return redirect(request.referrer or url_for('home'))

def reset_feeds():
    contr = FeedController()
    step = timedelta(seconds=3600 / contr.read().count())
    now = datetime.utcnow()
    for i, feed in enumerate(contr.read()
                             .order_by(contr._db_cls.last_retrieved)):
        contr.update({'id': feed.id},
                     {'etag': '', 'last_modified': '',
                      'last_retrieved': now - i * step})

async def parse_feed(user, feed):
    """
    Fetch a feed.
    Update the feed and return the articles.
    """
    parsed_feed = None
    up_feed = {}
    articles = []
    resp = None
    # with (await sem):
    try:
        logger.info('Retrieving feed {}'.format(feed.link))
        resp = await jarr_get(feed.link, timeout=5)
    except Exception as e:
        logger.info('Problem when reading feed {}'.format(feed.link))
        return
    finally:
        if None is resp:
            return
    try:
        content = io.BytesIO(resp.content)
        parsed_feed = feedparser.parse(content)
    except Exception as e:
        up_feed['last_error'] = str(e)
        up_feed['error_count'] = feed.error_count + 1
        logger.exception("error when parsing feed: " + str(e))
    finally:
        up_feed['last_retrieved'] = datetime.now(dateutil.tz.tzlocal())

    if parsed_feed is None:
        try:
            FeedController().update({'id': feed.id}, up_feed)
        except Exception as e:
            logger.exception('something bad here: ' + str(e))
        return

    if not is_parsing_ok(parsed_feed):
        up_feed['last_error'] = str(parsed_feed['bozo_exception'])
        up_feed['error_count'] = feed.error_count + 1
        FeedController().update({'id': feed.id}, up_feed)
        return

    if parsed_feed['entries'] != []:
        articles = parsed_feed['entries']

    up_feed['error_count'] = 0
    up_feed['last_error'] = ""

    # Feed information
    try:
        construct_feed_from(feed.link, parsed_feed).update(up_feed)
    except:
        logger.exception('error when constructing feed: {}'.format(feed.link))
    if feed.title and 'title' in up_feed:
        # do not override the title set by the user
        del up_feed['title']
    FeedController().update({'id': feed.id}, up_feed)

    return articles

def reset_errors(feed_id):
    feed_contr = FeedController(current_user.id)
    feed = feed_contr.get(id=feed_id)
    feed_contr.update({"id": feed_id}, {"error_count": 0, "last_error": ""})
    flash(gettext("Feed %(feed_title)r successfully updated.",
                  feed_title=feed.title), "success")
    return redirect(request.referrer or url_for("home"))

def opml_import():
    if request.files.get('opmlfile', None) is None:
        flash(gettext('Got no file'), 'warning')
        return redirect(url_for('user.profile'))
    data = request.files.get('opmlfile', None)
    try:
        subscriptions = opml.from_string(data.read())
    except:
        flash(gettext("Couldn't parse file"), 'danger')
        return redirect(request.referrer)

    ccontr = CategoryController(current_user.id)
    fcontr = FeedController(current_user.id)
    created_count, existing_count, failed_count = 0, 0, 0
    categories = {cat.name: cat.id for cat in ccontr.read()}
    for line in subscriptions:
        try:
            link = line.xmlUrl
        except Exception:
            failed_count += 1
            continue

        # don't import twice
        if fcontr.read(link=link).count():
            existing_count += 1
            continue

        # handling categories
        cat_id = None
        category = getattr(line, 'category', None)
        if category:
            if category not in categories:
                new_category = ccontr.create(name=category)
                categories[new_category.name] = new_category.id
            cat_id = categories[category]

        fcontr.create(title=getattr(line, 'text', None),
                      category_id=cat_id,
                      description=getattr(line, 'description', None),
                      link=link,
                      site_link=getattr(line, 'htmlUrl', None))
        created_count += 1
    flash(gettext("Created %(created)d feed ! (%(failed)d import failed, "
                  "%(existing)d were already existing)",
                  created=created_count, failed=failed_count,
                  existing=existing_count), "info")
    return redirect(url_for('user.profile'))

def feeds():
    "Lists the subscribed feeds in a table."
    art_contr = ArticleController(current_user.id)
    return render_template('feeds.html',
                           feeds=FeedController(current_user.id).read(),
                           unread_article_count=art_contr.count_by_feed(
                                   readed=False),
                           article_count=art_contr.count_by_feed())

def create(self, **attrs):
    # handling special denorm for article rights
    assert 'feed_id' in attrs, "must provide feed_id when creating article"
    feed = FeedController(
            attrs.get('user_id', self.user_id)).get(id=attrs['feed_id'])
    if 'user_id' in attrs:
        assert feed.user_id == attrs['user_id'] or self.user_id is None, \
                "no right on feed %r" % feed.id
    attrs['user_id'], attrs['category_id'] = feed.user_id, feed.category_id

    # handling feed's filters
    for filter_ in feed.filters or []:
        match = False
        if filter_.get('type') == 'regex':
            match = re.match(filter_['pattern'], attrs.get('title', ''))
        elif filter_.get('type') == 'simple match':
            match = filter_['pattern'] in attrs.get('title', '')
        take_action = match and filter_.get('action on') == 'match' \
                or not match and filter_.get('action on') == 'no match'
        if not take_action:
            continue
        if filter_.get('action') == 'mark as read':
            attrs['readed'] = True
            logger.warn("article %s will be created as read", attrs['link'])
        elif filter_.get('action') == 'mark as favorite':
            attrs['like'] = True
            logger.warn("article %s will be created as liked", attrs['link'])
    return super().create(**attrs)

def get_menu():
    categories_order = [0]
    categories = {0: {'name': 'No category', 'id': 0}}
    for cat in CategoryController(g.user.id).read().order_by('name'):
        categories_order.append(cat.id)
        categories[cat.id] = cat.dump()
    unread = ArticleController(g.user.id).count_by_feed(readed=False)
    for cat_id in categories:
        categories[cat_id]['unread'] = 0
        categories[cat_id]['feeds'] = []
    feeds = {feed.id: feed.dump()
             for feed in FeedController(g.user.id).read()}
    for feed_id, feed in feeds.items():
        feed['created_stamp'] = timegm(
                feed['created_date'].timetuple()) * 1000
        feed['last_stamp'] = timegm(
                feed['last_retrieved'].timetuple()) * 1000
        feed['category_id'] = feed['category_id'] or 0
        feed['unread'] = unread.get(feed['id'], 0)
        if not feed['filters']:
            feed['filters'] = []
        if feed.get('icon_url'):
            feed['icon_url'] = url_for('icon.icon', url=feed['icon_url'])
        categories[feed['category_id']]['unread'] += feed['unread']
        categories[feed['category_id']]['feeds'].append(feed_id)
    return jsonify(**{'feeds': feeds,
                      'categories': categories,
                      'categories_order': categories_order,
                      'crawling_method': conf.CRAWLING_METHOD,
                      'max_error': conf.DEFAULT_MAX_ERROR,
                      'error_threshold': conf.ERROR_THRESHOLD,
                      'is_admin': g.user.is_admin(),
                      'all_unread_count': sum(unread.values())})

def get_article(article_id, parse=False):
    locale = get_locale()
    contr = ArticleController(current_user.id)
    article = contr.get(id=article_id)
    if not article.readed:
        article['readed'] = True
        contr.update({'id': article_id}, {'readed': True})
    article['category_id'] = article.category_id or 0
    feed = FeedController(current_user.id).get(id=article.feed_id)
    article['icon_url'] = url_for('icon.icon', url=feed.icon_url) \
            if feed.icon_url else None
    readability_available = bool(current_user.readability_key
                                 or conf.PLUGINS_READABILITY_KEY)
    article['date'] = format_datetime(localize(article.date), locale=locale)
    article['readability_available'] = readability_available
    if parse or (not article.readability_parsed
                 and feed.readability_auto_parse and readability_available):
        try:
            new_content = readability.parse(
                    article.link,
                    current_user.readability_key
                    or conf.PLUGINS_READABILITY_KEY)
        except Exception as error:
            flash("Readability failed with %r" % error, "error")
            article['readability_parsed'] = False
        else:
            article['readability_parsed'] = True
            article['content'] = clean_urls(new_content, article['link'])
            new_attr = {'readability_parsed': True, 'content': new_content}
            contr.update({'id': article['id']}, new_attr)
    return article

def home():
    """Displays the home page of the connected user."""
    art_contr = ArticleController(current_user.id)
    unread = art_contr.count_by_feed(readed=False)
    nb_unread = art_contr.read_light(readed=False).count()

    feeds = {feed.id: feed for feed in sorted(current_user.feeds,
                                              key=lambda x: x.title.lower(),
                                              reverse=False)}

    filter_ = request.args.get("filter_", "unread")
    feed_id = int(request.args.get("feed", 0))
    liked = int(request.args.get("liked", 0)) == 1
    limit = request.args.get("limit", 1000)

    filters = {}
    if filter_ in ["read", "unread"]:
        filters["readed"] = filter_ == "read"
    if feed_id:
        filters["feed_id"] = feed_id
    if liked:
        filters["like"] = int(liked) == 1

    articles = art_contr.read_ordered(**filters)

    if limit != "all":
        limit = int(limit)
        articles = articles.limit(limit)

    in_error = {feed.id: feed.error_count for feed in
                FeedController(current_user.id).read(error_count__gt=0).all()}

    def gen_url(filter_=filter_, limit=limit, feed=feed_id, liked=liked):
        return "?filter_=%s&limit=%s&feed=%d&liked=%s" % (
            filter_, limit, feed, 1 if liked else 0)

    return render_template("home.html",
                           nb_unread=nb_unread,
                           gen_url=gen_url,
                           feed_id=feed_id,
                           filter_=filter_,
                           limit=limit,
                           feeds=feeds,
                           liked=liked,
                           unread=dict(unread),
                           articles=articles.all(),
                           in_error=in_error)

def export():
    """
    Export feeds to OPML.
    """
    include_disabled = request.args.get("includedisabled", "") == "on"
    include_private = request.args.get("includeprivate", "") == "on"
    include_exceeded_error_count = (request.args.get(
        "includeexceedederrorcount", "") == "on")
    filter = {}
    if not include_disabled:
        filter["enabled"] = True
    if not include_private:
        filter["private"] = False
    if not include_exceeded_error_count:
        filter["error_count__lt"] = conf.DEFAULT_MAX_ERROR
    user = UserController(current_user.id).get(id=current_user.id)
    feeds = FeedController(current_user.id).read(**filter)
    categories = {cat.id: cat.dump()
                  for cat in CategoryController(user.id).read()}
    response = make_response(render_template("opml.xml",
                                             user=user,
                                             feeds=feeds,
                                             categories=categories,
                                             now=datetime.now()))
    response.headers["Content-Type"] = "application/xml"
    response.headers["Content-Disposition"] = \
        "attachment; filename=feeds.opml"
    return response

def export():
    """
    Export feeds to OPML.
    """
    include_disabled = request.args.get('includedisabled', '') == 'on'
    include_private = request.args.get('includeprivate', '') == 'on'
    include_exceeded_error_count = request.args. \
            get('includeexceedederrorcount', '') == 'on'
    filter = {}
    if not include_disabled:
        filter['enabled'] = True
    if not include_private:
        filter['private'] = False
    if not include_exceeded_error_count:
        filter['error_count__lt'] = conf.DEFAULT_MAX_ERROR
    user = UserController(current_user.id).get(id=current_user.id)
    feeds = FeedController(current_user.id).read(**filter)
    categories = {cat.id: cat.dump()
                  for cat in CategoryController(user.id).read()}
    response = make_response(render_template('opml.xml', user=user,
                                             feeds=feeds,
                                             categories=categories,
                                             now=datetime.now()))
    response.headers['Content-Type'] = 'application/xml'
    response.headers['Content-Disposition'] = \
        'attachment; filename=feeds.opml'
    return response

def form(feed_id=None):
    action = gettext("Add a feed")
    categories = CategoryController(current_user.id).read()
    head_titles = [action]
    if feed_id is None:
        form = AddFeedForm()
        form.set_category_choices(categories)
        return render_template("edit_feed.html", action=action,
                               head_titles=head_titles, form=form)
    feed = FeedController(current_user.id).get(id=feed_id)
    form = AddFeedForm(obj=feed)
    form.set_category_choices(categories)
    action = gettext("Edit feed")
    head_titles = [action]
    if feed.title:
        head_titles.append(feed.title)
    return render_template("edit_feed.html", action=action,
                           head_titles=head_titles, categories=categories,
                           form=form, feed=feed)

def retrieve_feed(loop, user, feed_id=None):
    """
    Launch the process.
    """
    logger.info('Starting to retrieve feeds for {}'.format(user.nickname))
    # Get the list of feeds to fetch
    filters = {}
    filters['user_id'] = user.id
    if feed_id is not None:
        filters['id'] = feed_id
    filters['enabled'] = True
    filters['error_count__lt'] = conf.DEFAULT_MAX_ERROR
    filters['last_retrieved__lt'] = datetime.now() - \
            timedelta(minutes=conf.FEED_REFRESH_INTERVAL)
    feeds = FeedController().read(**filters).all()

    if feeds == []:
        logger.info('No feed to retrieve for {}'.format(user.nickname))
        return

    # Launch the process for all the feeds
    tasks = [asyncio.ensure_future(init_process(user, feed))
             for feed in feeds]
    try:
        loop.run_until_complete(asyncio.wait(tasks))
    except Exception:
        logger.exception('an error occurred')
    finally:
        logger.info('Articles retrieved for {}'.format(user.nickname))

def _articles_to_json(articles, fd_hash=None):
    now, locale = datetime.now(), get_locale()
    fd_hash = {feed.id: {"title": feed.title,
                         "icon_url": url_for("icon.icon", url=feed.icon_url)
                                     if feed.icon_url else None}
               for feed in FeedController(current_user.id).read()}

    return {"articles": [{"title": art.title,
                          "liked": art.like,
                          "read": art.readed,
                          "article_id": art.id,
                          "selected": False,
                          "feed_id": art.feed_id,
                          "category_id": art.category_id or 0,
                          "feed_title": fd_hash[art.feed_id]["title"]
                                        if fd_hash else None,
                          "icon_url": fd_hash[art.feed_id]["icon_url"]
                                      if fd_hash else None,
                          "date": format_datetime(localize(art.date),
                                                  locale=locale),
                          "rel_date": format_timedelta(art.date - now,
                                                       threshold=1.1,
                                                       add_direction=True,
                                                       locale=locale)}
                         for art in articles.limit(1000)]}

async def retrieve_feed(queue, users, feed_id=None):
    """
    Launch the process.
    """
    for user in users:
        logger.info("Starting to retrieve feeds for {}".format(user.nickname))
        filters = {}
        filters["user_id"] = user.id
        if feed_id is not None:
            filters["id"] = feed_id
        filters["enabled"] = True
        filters["error_count__lt"] = conf.DEFAULT_MAX_ERROR
        filters["last_retrieved__lt"] = datetime.now() - timedelta(
            minutes=conf.FEED_REFRESH_INTERVAL)
        feeds = FeedController().read(**filters).all()

        if feeds == []:
            logger.info("No feed to retrieve for {}".format(user.nickname))

        for feed in feeds:
            articles = await parse_feed(user, feed)
            await queue.put((user, feed, articles))

    await queue.put(None)

def test_feed_rights(self):
    cat = CategoryController(2).read()[0].dump()
    self.assertTrue(3,
                    ArticleController().read(category_id=cat['id']).count())
    self.assertTrue(3, FeedController().read(category_id=cat['id']).count())
    self._test_controller_rights(cat,
                                 UserController().get(id=cat['user_id']))

def _reset_feeds_freshness(self, **kwargs):
    if 'last_retrieved' not in kwargs:
        kwargs['last_retrieved'] = datetime(1970, 1, 1)
    if 'etag' not in kwargs:
        kwargs['etag'] = ''
    if 'last_modified' not in kwargs:
        kwargs['last_modified'] = ''
    FeedController().update({}, kwargs)

def test_feed_and_article_deletion(self):
    ccontr = CategoryController(2)
    cat = ccontr.read()[0].dump()
    ccontr.delete(cat['id'])
    self.assertEquals(0,
                      ArticleController().read(category_id=cat['id']).count())
    self.assertEquals(0,
                      FeedController().read(category_id=cat['id']).count())

def inactives():
    """
    List of inactive feeds.
    """
    nb_days = int(request.args.get('nb_days', 365))
    inactives = FeedController(current_user.id).get_inactives(nb_days)
    return render_template('inactives.html', inactives=inactives,
                           nb_days=nb_days)

def feed_pub(feed_id=None):
    """
    Presents details of a public feed if the profile of the owner
    is also public.
    """
    feed = FeedController(None).get(id=feed_id)
    if feed.private or not feed.user.is_public_profile:
        return render_template('errors/404.html'), 404
    return feed_view(feed_id, None)

async def parse_feed(user, feed):
    """
    Fetch a feed.
    Update the feed and return the articles.
    """
    parsed_feed = None
    up_feed = {}
    articles = []
    with (await sem):
        try:
            parsed_feed = await get(feed.link)
        except Exception as e:
            up_feed['last_error'] = str(e)
            up_feed['error_count'] = feed.error_count + 1
            logger.exception("error when parsing feed: " + str(e))
        finally:
            up_feed['last_retrieved'] = datetime.now(dateutil.tz.tzlocal())

    if parsed_feed is None:
        try:
            FeedController().update({'id': feed.id}, up_feed)
        except Exception as e:
            logger.exception('something bad here: ' + str(e))
        return

    if not is_parsing_ok(parsed_feed):
        up_feed['last_error'] = str(parsed_feed['bozo_exception'])
        up_feed['error_count'] = feed.error_count + 1
        FeedController().update({'id': feed.id}, up_feed)
        return

    if parsed_feed['entries'] != []:
        articles = parsed_feed['entries']

    up_feed['error_count'] = 0
    up_feed['last_error'] = ""

    # Feed information
    construct_feed_from(feed.link, parsed_feed).update(up_feed)
    if feed.title and 'title' in up_feed:
        # do not override the title set by the user
        del up_feed['title']
    FeedController().update({'id': feed.id}, up_feed)

    return articles

def config(self):
    print("graph_title JARR - Articles adding rate")
    print("graph_vlabel Articles per sec")
    print("articles.label Overall rate")
    print("articles.type DERIVE")
    print("articles.min 0")
    fcontr = FeedController(ignore_context=True)
    last_conn_max = datetime.utcnow() - timedelta(days=30)
    for id_ in fcontr.read()\
            .join(User).filter(User.is_active == True,
                               User.last_connection >= last_conn_max)\
            .with_entities(fcontr._db_cls.user_id)\
            .distinct().order_by('feed_user_id'):
        id_ = id_[0]
        print("articles_user_%s.label Rate for user %s" % (id_, id_))
        print("articles_user_%s.type DERIVE" % id_)
        print("articles_user_%s.min 0" % id_)
    print("graph_category web")
    print("graph_scale yes")

def post_preprocessor(self, data=None, **kw):
    data["user_id"] = current_user.id
    try:
        feed = FeedController(current_user.id).get(id=data["feed_id"])
    except NotFound:
        raise ProcessingException(description='No such feed.', code=404)
    self.is_authorized(current_user, feed)
    data["category_id"] = feed.category_id

def duplicates(feed_id):
    """
    Return duplicate articles for a feed.
    """
    feed, duplicates = FeedController(current_user.id).get_duplicates(feed_id)
    if len(duplicates) == 0:
        flash(gettext('No duplicates in the feed "{}".').format(feed.title),
              'info')
        return redirect(url_for('home'))
    return render_template('duplicates.html', duplicates=duplicates,
                           feed=feed)

def list_():
    "Lists the subscribed categories in a table."
    art_contr = ArticleController(current_user.id)
    return render_template(
        'categories.html',
        categories=list(CategoryController(current_user.id)
                        .read().order_by('name')),
        feeds_count=FeedController(current_user.id).count_by_category(),
        unread_article_count=art_contr.count_by_category(readed=False),
        article_count=art_contr.count_by_category())

def process_form(feed_id=None):
    form = AddFeedForm()
    feed_contr = FeedController(current_user.id)
    form.set_category_choices(CategoryController(current_user.id).read())
    if not form.validate():
        return render_template('edit_feed.html', form=form)
    existing_feeds = list(feed_contr.read(link=form.link.data))
    if existing_feeds and feed_id is None:
        flash(gettext("Couldn't add feed: feed already exists."), "warning")
        return redirect(url_for('feed.form', feed_id=existing_feeds[0].id))

    # Edit an existing feed
    feed_attr = {'title': form.title.data,
                 'enabled': form.enabled.data,
                 'link': form.link.data,
                 'site_link': form.site_link.data,
                 'filters': [],
                 'category_id': form.category_id.data,
                 'private': form.private.data}
    if not feed_attr['category_id'] or feed_attr['category_id'] == '0':
        del feed_attr['category_id']

    for filter_attr in ('type', 'pattern', 'action on', 'action'):
        for i, value in enumerate(
                request.form.getlist(filter_attr.replace(' ', '_'))):
            if i >= len(feed_attr['filters']):
                feed_attr['filters'].append({})
            feed_attr['filters'][i][filter_attr] = value

    if feed_id is not None:
        feed_contr.update({'id': feed_id}, feed_attr)
        flash(gettext('Feed %(feed_title)r successfully updated.',
                      feed_title=feed_attr['title']), 'success')
        return redirect(url_for('feed.form', feed_id=feed_id))

    # Create a new feed
    new_feed = feed_contr.create(**feed_attr)
    flash(gettext('Feed %(feed_title)r successfully created.',
                  feed_title=new_feed.title), 'success')
    if conf.CRAWLING_METHOD == "classic":
        misc_utils.fetch(current_user.id, new_feed.id)
        flash(gettext("Downloading articles for the new feed..."), 'info')
    return redirect(url_for('feed.form', feed_id=new_feed.id))

def test_feed_article_deletion(self):
    feed_ctr = FeedController(2)
    feed = feed_ctr.read()[0].dump()
    feed_ctr.delete(feed['id'])
    self.assertEqual(0, ArticleController().read(feed_id=feed['id']).count())

def execute(self):
    delta = timedelta(minutes=LATE_AFTER + FETCH_RATE + 1)
    fcontr = FeedController(ignore_context=True)
    print("feeds.value %d" % len(list(fcontr.list_late(delta, limit=0))))
    print("feeds_total.value %d" % fcontr.read().count())

def test_create_using_filters(self):
    feed_ctr = FeedController(2)
    feed1 = feed_ctr.read()[0].dump()
    feed2 = feed_ctr.read()[1].dump()
    feed3 = feed_ctr.read()[2].dump()
    feed_ctr.update({'id': feed1['id']},
                    {'filters': [{"type": "simple match",
                                  "pattern": "no see pattern",
                                  "action on": "match",
                                  "action": "mark as read"}]})
    feed_ctr.update({'id': feed3['id']},
                    {'filters': [{"type": "regex",
                                  "pattern": ".*(pattern1|pattern2).*",
                                  "action on": "no match",
                                  "action": "mark as favorite"},
                                 {"type": "simple match",
                                  "pattern": "no see pattern",
                                  "action on": "match",
                                  "action": "mark as read"}]})

    art1 = ArticleController(2).create(
            entry_id="thisisnotatest",
            feed_id=feed1['id'],
            title="garbage no see pattern garbage",
            content="doesn't matter",
            link="doesn't matter either")
    art2 = ArticleController(2).create(
            entry_id="thisisnotatesteither",
            feed_id=feed1['id'],
            title="garbage see pattern garbage",
            content="doesn't matter2",
            link="doesn't matter either2")

    art3 = ArticleController(2).create(
            entry_id="thisisnotatest",
            user_id=2,
            feed_id=feed2['id'],
            title="garbage no see pattern garbage",
            content="doesn't matter",
            link="doesn't matter either")
    art4 = ArticleController(2).create(
            entry_id="thisisnotatesteither",
            user_id=2,
            feed_id=feed2['id'],
            title="garbage see pattern garbage",
            content="doesn't matter2",
            link="doesn't matter either2")

    art5 = ArticleController(2).create(
            entry_id="thisisnotatest",
            feed_id=feed3['id'],
            title="garbage pattern1 garbage",
            content="doesn't matter",
            link="doesn't matter either")
    art6 = ArticleController(2).create(
            entry_id="thisisnotatesteither",
            feed_id=feed3['id'],
            title="garbage pattern2 garbage",
            content="doesn't matter2",
            link="doesn't matter either2")
    art7 = ArticleController(2).create(
            entry_id="thisisnotatesteither",
            feed_id=feed3['id'],
            title="garbage no see pattern3 garbage",
            content="doesn't matter3",
            link="doesn't matter either3")
    art8 = ArticleController(2).create(
            entry_id="thisisnotatesteither",
            feed_id=feed3['id'],
            title="garbage pattern4 garbage",
            content="doesn't matter4",
            link="doesn't matter either4")

    self.assertTrue(art1.readed)
    self.assertFalse(art1.like)
    self.assertFalse(art2.readed)
    self.assertFalse(art2.like)

    self.assertFalse(art3.readed)
    self.assertFalse(art3.like)
    self.assertFalse(art4.readed)
    self.assertFalse(art4.like)

    self.assertFalse(art5.readed)
    self.assertFalse(art5.like)
    self.assertFalse(art6.readed)
    self.assertFalse(art6.like)
    self.assertTrue(art7.readed)
    self.assertTrue(art7.like)
    self.assertFalse(art8.readed)
    self.assertTrue(art8.like)