# Example 1
    def parse_rss(self, channel):
        """Parse the downloaded RSS files of a channel into ``self.feed``.

        Args:
            channel: dict with a 'feeds' list; each feed entry is a dict
                with 'url', 'descrip' and 'nick' keys.
        """
        self.feed = RSSHandler()
        for feed in channel['feeds']:
            filename = '%s%s.xml' % (DATA_DIR, local_filename(feed['url']))
            try:
                self.feed.load_feed(filename, feed['descrip'], feed['nick'])
            except Exception:
                # Best-effort: a single missing/unparseable feed must not
                # abort the whole channel.  (Was a bare `except:`, which
                # also swallowed KeyboardInterrupt/SystemExit.)
                if not self.quiet:
                    print('WARNING: Could not parse %s' % filename)

        # Items from several feeds arrive interleaved; restore date order.
        if len(channel['feeds']) > 1:
            self.feed.sort_by_date()
# Example 2
class Kirss:
    """Aggregate RSS feeds into a single static HTML page.

    Feeds are downloaded into DATA_DIR (one thread per feed), parsed per
    channel and rendered to KIRSS_DIR/rss.htm.  Per-feed last-update
    times are persisted with pickle between runs.
    """

    def __init__(self, quiet=False):
        # suppress warnings/errors on stdout when True
        self.quiet = quiet

        # info on all the channels; initialized by parse_channels()
        self.channels = None

        # current channel info along with the items in the channel
        self.feed = None

        # alternates half-width channel boxes between the two columns
        self.right = False

        self.channel_file = CHANNEL_FILE
        self.last_update_file = '%slast_update.dat' % DATA_DIR

        if not os.path.isdir(KIRSS_DIR):
            os.mkdir(KIRSS_DIR)

        if not os.path.isdir(DATA_DIR):
            os.mkdir(DATA_DIR)

        # Load the last update times if possible; start fresh on any
        # failure (missing file, corrupt pickle, ...).  The file is
        # opened with `with` so the handle is not leaked, and in binary
        # mode to match save_update_times().
        try:
            with open(self.last_update_file, 'rb') as f:
                self.last_update = pickle.load(f)
        except Exception:
            self.last_update = {}

        self.parse_channels()

    def parse_all(self):
        """Parse everything and write the output HTML file."""
        # `with` guarantees the output file is closed even if a channel
        # fails to render halfway through.
        with open('%srss.htm' % KIRSS_DIR, 'w') as f:
            self.make_header(f)
            for channel in self.channels:
                self.curr_channel = channel
                self.parse_rss(channel)
                self.make_html(f, channel)
            self.make_footer(f)

    def parse_rss(self, channel):
        """Parse the downloaded RSS files of a channel into ``self.feed``.

        Args:
            channel: dict with a 'feeds' list; each feed entry is a dict
                with 'url', 'descrip' and 'nick' keys.
        """
        self.feed = RSSHandler()
        for feed in channel['feeds']:
            filename = '%s%s.xml' % (DATA_DIR, local_filename(feed['url']))
            try:
                self.feed.load_feed(filename, feed['descrip'], feed['nick'])
            except Exception:
                # Best-effort: a single missing/unparseable feed must not
                # abort the whole channel.  (Was a bare `except:`, which
                # also swallowed KeyboardInterrupt/SystemExit.)
                if not self.quiet:
                    print('WARNING: Could not parse %s' % filename)

        # Items from several feeds arrive interleaved; restore date order.
        if len(channel['feeds']) > 1:
            self.feed.sort_by_date()

    def make_header(self, f):
        """Copy the header template into the output HTML file *f*."""
        header = open_data_file('header.htm')
        try:
            f.write(header.read())
        finally:
            header.close()

    def make_footer(self, f):
        """Copy the footer template into the output HTML file *f*."""
        footer = open_data_file('footer.htm')
        try:
            f.write(footer.read())
        finally:
            footer.close()

    def make_nick(self, nick):
        """Return '[nick] ' escaped for HTML, or '' when there is no nick."""
        if not nick:
            return ''
        return '[%s] ' % esc(nick)

    def make_title(self, name, url):
        """Return the escaped channel title, linked to *url* when given."""
        if not url:
            return esc(name)
        return '<a href="%s">%s</a>' % (esc(url), esc(name))

    def make_html(self, f, channel):
        """Convert the parsed RSS data of *channel* into HTML on *f*.

        Writes nothing when the channel produced no items (e.g. all of
        its feeds failed to download or parse).
        """
        if not self.feed.items:
            return

        # half-width channels alternate between the two columns
        if channel['width'] == 'half':
            css = 'c2' if self.right else 'c1'
            self.right = not self.right
        else:
            css = 'cfull'
        f.write('\t\t<div class="rss_channel %s">\n' % css)
        f.write('\t\t\t<div class="rss_title">%s</div>\n' % self.make_title(channel['name'], channel['url']))

        # channel['num'] caps how many items are shown; a value <= 0
        # shows none.  Slice once instead of testing the cap per item.
        num = channel['num']
        if num > 0:
            for item in self.feed.items[:num]:
                f.write('\t\t\t<div class="rss_item">%s<a href="%s">%s</a></div>\n' % (self.make_nick(item['nick']), esc(item['link']), esc(item['title'])))
                if item['description'] != '':
                    f.write('\t\t\t<div class="rss_body">%s</div>\n' % item['description'].encode('utf-8'))
        f.write('\t\t</div>\n')

    def download(self, timeout):
        """Download all RSS feeds concurrently, one thread per feed.

        Args:
            timeout: socket timeout in seconds applied to every download.
        """
        setdefaulttimeout(timeout)

        threads = []
        for channel in self.channels:
            for feed in channel['feeds']:
                t = DownloadThread(channel, feed, self.last_update, self.quiet)
                threads.append(t)
                t.start()

        # wait for every thread to finish before persisting timestamps
        for t in threads:
            t.join()

        self.save_update_times()

    def save_update_times(self):
        """Persist the last update time of each feed to disk."""
        # binary mode + `with`: matches the load in __init__ and does
        # not leak the handle (the original never closed the file).
        with open(self.last_update_file, 'wb') as f:
            pickle.dump(self.last_update, f)

    def parse_channels(self):
        """Read the channel definitions from the config file."""
        try:
            self.channels = ChannelHandler(self.channel_file, self.quiet)
        except InvalidChannelException:
            if not self.quiet:
                print('ERROR: Could not parse %s' % self.channel_file)
            exit(1)