elem.tail = i

# Populate the channel-level metadata on the feed.
for field, value in [
    ("title", "Example feed"),
    ("link", "http://www.example.com"),
    ("author", "Mr X. Ample"),
    ("description", "A simple example feed with one item in it"),
]:
    feed.feed[field] = value

# Build a single example item; pubDate is the moment of generation.
item = {
    "title": "Test item",
    "link": "http://www.example.com/example_url",
    "description": "And now for something completely different",
    "pubDate": time.localtime(),
    "guid": "1234567890",
}

# Add item to feed.
# You can do this as many times as you like with multiple items.
feed.items.append(item)

# Print the feed to stdout in various formats.
print(feed.format_rss1_string())
print(feed.format_rss2_string())
print(feed.format_atom_string())

# Save the feed to a file in various formats.
feed.format_rss1_file("example_feed_rss1.xml")
feed.format_rss2_file("example_feed_rss2.xml")
feed.format_atom_file("example_feed_atom.xml")
# Add every pending notice to the outgoing feed.
# You can do this as many times as you like with multiple items.
feed.items.extend(notices)

# Copy entries across from the origin feed, normalising timestamps:
# strip the numeric UTC offset (e.g. "+0000") so strptime can parse the
# rest, then store the parsed time.struct_time back on the entry so the
# feed writer receives a uniform date type.
for entry in origin_feed.entries:
    stripped = re.sub(r"[+-]([0-9])+", "", entry.published)
    tm = datetime.datetime.strptime(stripped, '%a, %d %b %Y %H:%M:%S ').timetuple()
    entry.published = tm
    entry.updated = tm
    feed.items.append(entry)

# Save the feed as RSS 2.0 and publish it to the web root.
feed.format_rss2_file("rss2.xml")

# Copy with shutil rather than os.system('cp ...'): no shell is
# involved, and failures raise an exception instead of being ignored.
import shutil
shutil.copy("rss2.xml", "/var/www")
# NOTE(review): this span begins inside a dict literal whose opening
# brace is outside this view — presumably the todaysSinfest record.
# These two entries hold human-readable and ISO renditions of today.
'dateFormatted': today.strftime('%d %b %Y'),
'date': today.strftime('%Y-%m-%d')
}

# Create the feed.
feed = Feed()

# Set the feed/channel level properties.
feed.feed['title'] = 'Sinfest RSS'
feed.feed['link'] = 'http://www.sinfest.net'
feed.feed['author'] = 'Tatsuya Ishida'
feed.feed['description'] = 'RSS feed for Sinfest'

# Create an item.
# For this basic feed, I'll only include the latest comic.
item = {}
item['link'] = todaysSinfest['url']
item['guid'] = todaysSinfest['date']  # the date string doubles as the unique id
item["pubDate"] = time.localtime()  # publication time = generation time
item['title'] = 'Sinfest for %s: %s' % (todaysSinfest['dateFormatted'], todaysSinfest['title'])
# Embed the comic image when one was found; otherwise a placeholder note.
if todaysSinfest['imageUrl'] != '':
    item['summary'] = '<img src="%s" />' % (todaysSinfest['imageUrl'])
else:
    item['summary'] = 'image not found'

# Add item to feed.
feed.items.append(item)

# Save the feed to a file.
feed.format_rss2_file('rss2.xml')
# NOTE(review): this span opens with the closing brace of a structure
# (template data dict?) defined before this view.
}

# Dump the rendered template at debug level for troubleshooting.
log.debug("=== begin ===")
log.debug(tpl.render(data))
log.debug("=== end ===")

# Add new item to RSS feed
item = {}
item['title'] = alert.getSeverityTag()
item['link'] = 'https://druid'
item['description'] = alert.numEventsStr()
item['pubDate'] = time.localtime()
log.debug("Adding new RSS feed item: " + pprint.pformat(item))
feed.items.append(item)

# Write the feed file, then release the advisory lock (presumably
# acquired with fcntl.flock earlier in the file — confirm) and close it.
feed.format_rss2_file(FEED_RSS)
if feed_rss:
    fcntl.flock(feed_rss, fcntl.LOCK_UN)
    feed_rss.close()

# For high-severity alerts, additionally push a notification.
if alert.getSeverityTag() in ['CRITICAL', 'HIGH']:
    # Push through Pushbullet
    API_KEY = ''  # NOTE(review): empty API key — presumably supplied elsewhere; verify
    # HACK: hard-coded site-packages path so the pushybullet package is importable.
    sys.path.insert(0, '/usr/lib/python2.6/site-packages')
    import pushybullet as pb
    api = pb.PushBullet(API_KEY)
    title = '%s: %s' % (alert.getSeverityTag(), alert.numEventsStr())
    link = pb.LinkPush('https://druid', title)
'http://ej.iop.org/rss/0953-8984/latestpapers.xml', 'http://www.nature.com/nphys/current_issue/rss/', 'http://feeds.aps.org/rss/recent/pra.xml', 'http://feeds.aps.org/rss/recent/prb.xml', 'http://feeds.aps.org/rss/recent/pre.xml', 'http://feeds.aps.org/rss/recent/prl.xml', 'http://feeds.aps.org/rss/recent/focus.xml', 'http://feeds.aps.org/rss/recent/physics.xml', 'http://feeds.aps.org/rss/recent/rmp.xml', 'http://www.sciencemag.org/rss/current.xml' ] # Get matching entries keywords = [keyword.lower() for keyword in keywords] for source in sources: for entry in feedparser.parse(source).entries: title = entry['title'].lower().replace('-', ' ') summary = entry['summary'].lower().replace('-', ' ') if any([keyword in title + ' ' + summary for keyword in keywords]): item = entry if 'pubDate' in entry.keys(): item['pubDate'] = entry['pubDate'] elif 'updated' in entry.keys(): item['pubDate'] = entry['updated_parsed'] else: item['pubDate'] = time.localtime() feed.items.append(item) # Save the feed feed.format_rss2_file("feelter.rss")
# Add every pending notice to the outgoing feed
# (extend replaces the manual index-based while loop).
feed.items.extend(notices)

# Copy entries across from the origin feed, normalising timestamps:
# strip the numeric UTC offset (e.g. "+0000") so strptime accepts the
# remainder, then store the parsed time.struct_time back on the entry
# so every item carries a uniform date type.
for entry in origin_feed.entries:
    cleaned = re.sub(r"[+-]([0-9])+", "", entry.published)
    parsed = datetime.datetime.strptime(cleaned, '%a, %d %b %Y %H:%M:%S ')
    tm = parsed.timetuple()
    entry.published = tm
    entry.updated = tm
    feed.items.append(entry)

# Save the feed as RSS 2.0 and publish it to the web root.
feed.format_rss2_file("rss2.xml")

# Use shutil.copy instead of os.system('cp ...'): avoids spawning a
# shell and surfaces copy failures as exceptions instead of silently
# ignoring the exit status.
import shutil
shutil.copy("rss2.xml", "/var/www")