Example #1
0
    def fetch_apropos_news(self):
        """Retrieve the latest "À propos" blog entries and return them.

        Fetches the RSS feed matching the currently active language, keeps
        at most six entries, and caches the result for one hour.  Returns
        an empty list when the feed is unavailable.
        """
        current_lang = translation.get_language()
        feed_url = (
            "https://apropos.erudit.org/en/erudit-en/blog/feed/"
            if current_lang == "en"
            else "https://apropos.erudit.org/fr/erudit/blogue/feed/"
        )

        entries_cache_key = "apropos-feed-{lang}".format(lang=current_lang)

        # Tries to fetch previously stored entries
        entries = cache.get(entries_cache_key, None)

        if entries is None:
            # Fetches the blog entries.  The feed status is checked with an
            # explicit `if` rather than `assert`: assertions are stripped when
            # Python runs with -O, which would silently disable this error
            # handling and let a failed fetch through.
            parsed = rss_parse(feed_url)
            if parsed.get("status") != 200:
                # The feed is not available; no exception is in flight here,
                # so exc_info is intentionally omitted.
                logger.error(
                    "Apropos feeds unavailable ({})".format(feed_url), extra={"request": self.request}
                )
                return []
            entries = parsed.get("entries", [])[:6]

            # Stores the entries in the cache
            cache.set(entries_cache_key, entries, 60 * 60)  # 1 hour

        return entries
Example #2
0
    def fetch_apropos_news(self):
        """Retrieve the latest "À propos" blog entries and return them.

        Fetches the RSS feed matching the currently active language, keeps
        at most six entries (each augmented with a ``dt_published`` datetime
        built from the feed's ``published_parsed`` time struct), and caches
        the result for one hour.  Returns an empty list when the feed is
        unavailable.
        """
        current_lang = translation.get_language()
        feed_url = 'https://apropos.erudit.org/en/erudit-en/blog/feed/' if current_lang == 'en' \
            else 'https://apropos.erudit.org/fr/erudit/blogue/feed/'

        entries_cache_key = 'apropos-feed-{lang}'.format(lang=current_lang)

        # Tries to fetch previously stored entries
        entries = cache.get(entries_cache_key, None)

        if entries is None:
            # Fetches the blog entries.  The feed status is checked with an
            # explicit `if` rather than `assert`: assertions are stripped when
            # Python runs with -O, which would silently disable this error
            # handling and let a failed fetch through.  A 304 (Not Modified)
            # is accepted alongside 200.
            parsed = rss_parse(feed_url)
            if parsed.get('status') not in (200, 304):
                # The feed is not available; no exception is in flight here,
                # so exc_info is intentionally omitted.
                logger.error('Apropos feeds unavailable ({})'.format(feed_url),
                             extra={'request': self.request, })
                return []
            entries = parsed.get('entries', [])[:6]

            # Converts the 'published' time struct to a (naive, local-time)
            # datetime object usable by templates.
            for item in entries:
                item['dt_published'] = dt.datetime.fromtimestamp(time.mktime(item.published_parsed))

            # Stores the entries in the cache
            cache.set(entries_cache_key, entries, 60 * 60)  # 1 hour

        return entries