def get_latest_episode(self, feed, ttl=None):
        storage = shelve.open(self.storage_path)
        latest_entry = None

        if ttl is None:
            ttl = self.ttl

        try:
            fc = cache.Cache(storage, timeToLiveSeconds=ttl)
            parsed_data = fc.fetch(feed["rss_url"])
            # Assume the feed lists entries newest-first and take the first
            # entry as the latest episode.
            if parsed_data.entries:
                latest_entry = parsed_data.entries[0]
        finally:
            storage.close()

        if latest_entry is None:
            self.errors.append({
                "dialog": "no.latest.episode",
                "data": {
                    "feed": feed
                }
            })
        return latest_entry
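
All of these snippets lean on the same feedcache pattern: open a shelve file as the persistent store, wrap it in a Cache with a time-to-live, and call fetch(), which returns the same kind of result feedparser.parse() gives. A minimal standalone sketch of that pattern, assuming the usual from feedcache import cache import and with placeholder paths and URLs:

import shelve

from feedcache import cache  # assumed import path for the feedcache package

CACHE_PATH = '/var/tmp/example.feedcache'   # placeholder cache location
FEED_URL = 'http://example.com/feed.rss'    # placeholder feed

storage = shelve.open(CACHE_PATH)
try:
    # Entries fetched less than timeToLiveSeconds ago are served from the
    # shelf instead of being downloaded again.
    fc = cache.Cache(storage, timeToLiveSeconds=15 * 60)
    parsed = fc.fetch(FEED_URL)             # feedparser-style result
    titles = [entry.title for entry in parsed.entries[:5]]
finally:
    storage.close()
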
Example #2
def getLatestNews():
    ret = []
    storage = shelve.open(cacheFile)
    try:
        fc = cache.Cache(storage, timeToLiveSeconds=ttl)
        data = fc.fetch(feed)
        for entry in data.entries:
            try:
                # Decode raw byte fields using the configured channel encoding.
                url = unicode(entry.link, channels.encoding)
                summary = unicode(entry.description, channels.encoding)
                title = unicode(entry.title, channels.encoding)
            except (TypeError, UnicodeDecodeError):
                # Fields may already be unicode; use them as-is.
                url = entry.link
                summary = entry.description
                title = entry.title

            date = entry.updated_parsed
            datestr = time.strftime('%d/%m/%Y', date)
            ret.append({
                'url': url,
                'summary': summary,
                'title': title,
                'date': datestr
            })

    except Exception as e:
        # Log the failure and return an empty list rather than crashing.
        logging.warn(e)
        ret = []
    finally:
        storage.close()
    return ret
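
A side note on the date handling above: feedparser exposes entry.updated_parsed as a time.struct_time, which is why time.strftime() can format it directly. A small illustration with a hand-built struct_time standing in for the feed value:

import time

# Stand-in for entry.updated_parsed, which is a time.struct_time.
updated = time.strptime('2009-05-17', '%Y-%m-%d')
datestr = time.strftime('%d/%m/%Y', updated)    # '17/05/2009'
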
Example #3
def latest_tweets(twitter_url):
    output = []
    try:
        # Prefer the on-disk feed cache; close the shelf once the fetch is done.
        storage = shelve.open('/var/tmp/.feedcache')
        try:
            c = cache.Cache(storage)
            f = c.fetch(twitter_url)
        finally:
            storage.close()
    except Exception:
        # Fall back to an uncached fetch if the cache store is unavailable.
        f = feedparser.parse(twitter_url)
    for entry in f.entries:
        # Titles in the Twitter feed look like "username: status text"; keep the text.
        t = Tweet(date=datetime.date(entry.updated_parsed[0],
                                     entry.updated_parsed[1],
                                     entry.updated_parsed[2]),
                  url=entry.link,
                  text=entry.title.split(":", 1)[1])
        output.append(t)
    return output
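
The snippet above assumes a Tweet class (plus shelve, feedparser, feedcache, and datetime imports) defined elsewhere in that project. To run it in isolation, a minimal stand-in for Tweet could be a namedtuple with the three fields the function actually sets:

import collections

# Hypothetical stand-in for the project's Tweet type.
Tweet = collections.namedtuple('Tweet', ['date', 'url', 'text'])
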
Example #4
def get_latest_episode(self, url, media=False):
    storage_path = join(self.file_system.path, 'feedcache')
    LOGGER.debug("storage_path:%s" % storage_path)
    storage = shelve.open(storage_path)
    ttl = 60 * 60
    link = ""
    try:
        fc = cache.Cache(storage, timeToLiveSeconds=ttl)
        parsed_data = fc.fetch(url)
        print "parsed_data.feed.title:", parsed_data.feed.title
        for entry in parsed_data.entries:
            pprint(entry)
            if media:
                # Prefer the first Media RSS enclosure URL when one is present.
                media_content = entry.media_content
                if media_content:
                    link = media_content[0]['url']
            else:
                link = entry.link
            # Stop at the first entry that yields a usable link.
            if link:
                break
    finally:
        storage.close()
    return link
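
For the media branch above: feedparser maps Media RSS <media:content> elements onto entry.media_content as a list of dicts keyed by 'url', 'type', and so on, which is the shape the code indexes into. A plain-dict illustration of that shape (illustrative values only):

# Illustrative shape only; a real feedparser entry is a FeedParserDict.
entry = {'media_content': [{'url': 'http://example.com/episode1.mp3',
                            'type': 'audio/mpeg'}]}
link = entry['media_content'][0]['url']
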
Example #5
def get_feed_data(self, feed, ttl=None):
        # Fetch the feed and read both the title and the link from the RSS data itself.
        if ttl is None:
            ttl = self.ttl

        storage = shelve.open(self.storage_path)
        title = None
        href = None
        rss_url = feed.get("rss_url")
        if not rss_url:
            self.errors.append({"dialog": "empty.rss.url", "data": feed})
            return title, href

        try:
            fc = cache.Cache(storage, timeToLiveSeconds=ttl)
            parsed_data = fc.fetch(rss_url)
            # Guard with .get() so feeds without a title or link do not raise.
            if parsed_data.feed.get("title"):
                title = parsed_data.feed.title
            if parsed_data.feed.get("link"):
                href = parsed_data.feed.link
        finally:
            storage.close()

        return title, href
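
Examples #1 and #5 read like methods of the same feed-reader class: both expect self.storage_path, self.ttl, and self.errors on the instance, plus a feed dict carrying an "rss_url" key. A hedged sketch of the surrounding class, with the name and values invented for illustration:

class FeedReader(object):
    # Illustrative container for Examples #1 and #5; the real class name and
    # defaults are unknown. The methods themselves also need the shelve and
    # feedcache imports shown earlier.
    def __init__(self, storage_path, ttl=15 * 60):
        self.storage_path = storage_path
        self.ttl = ttl
        self.errors = []

    # get_latest_episode() and get_feed_data() would be pasted here unchanged.


reader = FeedReader('/var/tmp/feedreader.cache')
feed = {"rss_url": "http://example.com/podcast.rss"}
# title, href = reader.get_feed_data(feed)
# episode = reader.get_latest_episode(feed)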