def fucking_dinner(jenni, input):
    '''.fd -- provide suggestions for dinner'''
    txt = input.group(2)
    url = 'http://www.whatthefuckshouldimakefordinner.com'
    if txt == '-v':
        # vegetarian mode
        url = 'http://whatthefuckshouldimakefordinner.com/veg.php'
    page = web.get(url)
    results = re_mark.findall(page)
    if results:
        dish = results[0][1].upper()
        long_url = results[0][0]
        try:
            short_url = short(long_url)[0][1]
        except Exception:
            # shortening is best-effort; fall back to the full recipe URL
            # (was a bare except, which also swallowed KeyboardInterrupt)
            short_url = long_url
        jenni.say("WHY DON'T YOU EAT SOME F*****G: " + dish + " HERE IS THE RECIPE: " + short_url)
    else:
        jenni.say("I DON'T F*****G KNOW, EAT PIZZA.")
def get_arxiv(query):
    """Search the arXiv API and describe the first matching entry.

    Returns a tuple (arxivid, authors, title, abstract, short_url).
    Raises IndexError when the search yields no results.
    """
    from modules.url import short
    url = base_url + request.format(urllib.quote(query))
    xml = web.get(url)
    feed = feedparser.parse(xml)
    if feed.feed.opensearch_totalresults < 1:
        raise IndexError

    # get the first (and only) entry
    entry = feed.entries[0]
    abs_link = entry.id
    arxivid = id_filter.sub(r'\1', abs_link)
    try:
        short_url = short(abs_link)[0][1]
    except Exception:
        # shortening is best-effort (was a bare except, which also
        # swallowed KeyboardInterrupt/SystemExit)
        short_url = ''

    # format the author string; use et al. for 3+ authors,
    # unless the first "author" is a collaboration name
    if len(entry.authors) > 2:
        authors = entry.authors[0].name
        if collab_check.match(authors) is None:
            authors += ' et al.'
    elif len(entry.authors) > 0:
        authors = ' and '.join([author.name for author in entry.authors])
    else:
        authors = ''

    title = entry.title
    abstract = no_newlines.sub(' ', entry.summary)
    return (arxivid, authors, title, abstract, short_url)
def get_arxiv(query):
    """Search the arXiv API and describe the first matching entry.

    Returns a tuple (arxivid, authors, title, abstract, short_url).
    Raises IndexError when the search yields no results.
    """
    url = base_url + request.format(urllib.quote(query))
    xml = web.get(url)
    feed = feedparser.parse(xml)
    if feed.feed.opensearch_totalresults < 1:
        raise IndexError

    # get the first (and only) entry
    entry = feed.entries[0]
    abs_link = entry.id
    arxivid = id_filter.sub(r'\1', abs_link)
    try:
        short_url = short(abs_link)[0][1]
    except Exception:
        # shortening is best-effort (was a bare except, which also
        # swallowed KeyboardInterrupt/SystemExit)
        short_url = ''

    # format the author string; use et al. for 3+ authors,
    # unless the first "author" is a collaboration name
    if len(entry.authors) > 2:
        authors = entry.authors[0].name
        if collab_check.match(authors) is None:
            authors += ' et al.'
    elif len(entry.authors) > 0:
        authors = ' and '.join([author.name for author in entry.authors])
    else:
        authors = ''

    title = entry.title
    abstract = no_newlines.sub(' ', entry.summary)
    return (arxivid, authors, title, abstract, short_url)
def read_feeds(jenni): global restarted global STOP restarted = False conn = sqlite3.connect('rss.db') c = conn.cursor() checkdb(c) c.execute("SELECT * FROM rss") if not c.fetchall(): STOP = True jenni.say("No RSS feeds found in database. Please add some rss feeds.") c.execute("SELECT * FROM rss") conn_recent = sqlite3.connect('recent_rss.db') cursor_recent = conn_recent.cursor() cursor_recent.execute("CREATE TABLE IF NOT EXISTS recent ( channel text, site_name text, article_title text, article_url text )") conn_recent.commit() for row in c: feed_channel = row[0] feed_site_name = row[1] feed_url = row[2] feed_fg = row[3] feed_bg = row[4] try: fp = feedparser.parse(feed_url) except: jenni.say("Can't parse.") try: entry = fp.entries[0] except: jenni.say("row: " + str(row)) jenni.say("Can't find element: " + str(fp)) continue if not feed_fg and not feed_bg: site_name_effect = "[\x02%s\x02]" % (feed_site_name) elif feed_fg and not feed_bg: site_name_effect = "[\x02\x03%s%s\x03\x02]" % (feed_fg, feed_site_name) elif feed_fg and feed_bg: site_name_effect = "[\x02\x03%s,%s%s\x03\x02]" % (feed_fg, feed_bg, feed_site_name) try: article_url = entry.link except: print "Something went wrong" print str(entry) continue # only print if new entry sql_text = (feed_channel, feed_site_name, entry.title, article_url) cursor_recent.execute("SELECT * FROM recent WHERE channel = ? AND site_name = ? and article_title = ? 
AND article_url = ?", sql_text) if len(cursor_recent.fetchall()) < 1: short_url = url_module.short(article_url) try: short_url = short_url[0][1][:-1] except: short_url = article_url response = site_name_effect + " %s \x02%s\x02" % (entry.title, short_url) if entry.updated: response += " - %s" % (entry.updated) jenni.msg(feed_channel, response) t = (feed_channel, feed_site_name, entry.title, article_url,) cursor_recent.execute("INSERT INTO recent VALUES (?, ?, ?, ?)", t) conn_recent.commit() conn.commit() else: if DEBUG: jenni.msg(feed_channel, u"Skipping previously read entry: %s %s" % (site_name_effect, entry.title)) cursor_recent.close() c.close()
def read_feeds(jenni): global restarted global STOP restarted = False conn = sqlite3.connect('rss.db') c = conn.cursor() checkdb(c) c.execute("SELECT * FROM rss") if not c.fetchall(): STOP = True jenni.say("No RSS feeds found in database. Please add some rss feeds.") c.execute("SELECT * FROM rss") conn_recent = sqlite3.connect('recent_rss.db') cursor_recent = conn_recent.cursor() cursor_recent.execute( "CREATE TABLE IF NOT EXISTS recent ( channel text, site_name text, article_title text, article_url text )" ) conn_recent.commit() for row in c: feed_channel = row[0] feed_site_name = row[1] feed_url = row[2] feed_fg = row[3] feed_bg = row[4] try: fp = feedparser.parse(feed_url) except: jenni.say("Can't parse.") try: entry = fp.entries[0] except: if DEBUG: jenni.say("row: " + str(row)) jenni.say("Can't find element: " + str(fp)) continue if not feed_fg and not feed_bg: site_name_effect = "[\x02%s\x02]" % (feed_site_name) elif feed_fg and not feed_bg: site_name_effect = "[\x02\x03%s%s\x03\x02]" % (feed_fg, feed_site_name) elif feed_fg and feed_bg: site_name_effect = "[\x02\x03%s,%s%s\x03\x02]" % (feed_fg, feed_bg, feed_site_name) try: article_url = entry.link except: print "Something went wrong" print str(entry) continue # only print if new entry sql_text = (feed_channel, feed_site_name, entry.title, article_url) cursor_recent.execute( "SELECT * FROM recent WHERE channel = ? AND site_name = ? and article_title = ? 
AND article_url = ?", sql_text) if len(cursor_recent.fetchall()) < 1: short_url = url_module.short(article_url) try: short_url = short_url[0][1][:-1] except: short_url = article_url if 'j.mp' in short_url or 'bit.ly' in short_url: short_url = short_url.replace('http:', 'https:') response = site_name_effect + " %s \x02%s\x02" % (entry.title, short_url) if hasattr(entry, 'updated') and entry.updated: response += " - %s" % (entry.updated) jenni.msg(feed_channel, response) t = ( feed_channel, feed_site_name, entry.title, article_url, ) cursor_recent.execute("INSERT INTO recent VALUES (?, ?, ?, ?)", t) conn_recent.commit() conn.commit() else: if DEBUG: if hasattr(jenni.config, 'logchan_pm'): jenni.msg( jenni.config.logchan_pm, u"Skipping previously read entry for %s: %s %s" % (feed_channel, site_name_effect, entry.title)) else: jenni.say(u"Skipping previously read entry for %s: %s %s" % (feed_channel, site_name_effect, entry.title)) cursor_recent.close() c.close()
def read_feeds(jenni): global restarted restarted = False conn = sqlite3.connect('rss.db') c = conn.cursor() c.execute("SELECT * FROM rss") for row in c: feed_channel = row[0] feed_site_name = row[1] feed_url = row[2] feed_fg = row[4] feed_bg = row[5] try: fp = feedparser.parse(feed_url) except IOError, E: jenni.say("Can't parse, " + str(E)) try: entry = fp.entries[0] if not feed_fg and not feed_bg: site_name_effect = "[\x02%s\x02]" % (feed_site_name) elif feed_fg and not feed_bg: site_name_effect = "[\x02\x03%s%s\x03\x02]" % (feed_fg, feed_site_name) elif feed_fg and feed_bg: site_name_effect = "[\x02\x03%s,%s%s\x03\x02]" % (feed_fg, feed_bg, feed_site_name) #if not feed_modified == entry.updated: if feed_channel not in dupes: dupes[feed_channel] = dict() if feed_site_name not in dupes[feed_channel]: dupes[feed_channel][feed_site_name] = list() if entry.title not in dupes[feed_channel][feed_site_name]: dupes[feed_channel][feed_site_name].append(entry.title) if entry.id: article_url = entry.id elif entry.feedburner_origlink: article_url = entry.feedburner_origlink else: article_url = entry.links[0].href short_url = url_module.short(article_url) if short_url: short_url = short_url[0][1][:-1] else: short_url = article_url response = site_name_effect + " %s \x02%s\x02" % (entry.title, short_url) if entry.updated: response += " - %s" % (entry.updated) jenni.msg(feed_channel, response) t = (entry.updated, feed_channel, feed_site_name, feed_url,) c.execute("UPDATE rss SET modified = ? WHERE channel = ? AND site_name = ? AND site_url = ?", t) conn.commit() c.close() else: if DEBUG: jenni.msg(feed_channel, u"Skipping previously read entry: %s %s" % (site_name_effect, entry.title)) except Exception, E: if DEBUG: jenni.say(str(E))
def read_feeds(jenni): global restarted global STOP restarted = False if sqlite: conn = sqlite3.connect('rss.db') else: conn = MySQLdb.connect(host=jenni.config.userdb_host, user=jenni.config.userdb_user, passwd=jenni.config.userdb_pass, db=jenni.config.userdb_name) c = conn.cursor() checkdb(c) c.execute("SELECT * FROM rss") if not c.fetchall(): STOP = True jenni.say("No RSS feeds found in database. Please add some rss feeds.") c.execute("SELECT * FROM rss") conn_recent = sqlite3.connect('recent_rss.db') cursor_recent = conn_recent.cursor() cursor_recent.execute("CREATE TABLE IF NOT EXISTS recent ( channel text, site_name text, article_title text, article_url text )") for row in c: feed_channel = row[0] feed_site_name = row[1] feed_url = row[2] feed_fg = row[3] feed_bg = row[4] try: fp = feedparser.parse(feed_url) except IOError, E: jenni.say("Can't parse, " + str(E)) entry = fp.entries[0] if not feed_fg and not feed_bg: site_name_effect = "[\x02%s\x02]" % (feed_site_name) elif feed_fg and not feed_bg: site_name_effect = "[\x02\x03%s%s\x03\x02]" % (feed_fg, feed_site_name) elif feed_fg and feed_bg: site_name_effect = "[\x02\x03%s,%s%s\x03\x02]" % (feed_fg, feed_bg, feed_site_name) if hasattr(entry, 'id'): article_url = entry.id elif hasattr(entry, 'feedburner_origlink'): article_url = entry.feedburner_origlink else: article_url = entry.links[0].href # only print if new entry sql_text = (feed_channel, feed_site_name, entry.title, article_url) cursor_recent.execute("SELECT * FROM recent WHERE channel = ? AND site_name = ? and article_title = ? 
AND article_url = ?", sql_text) if len(cursor_recent.fetchall()) < 1: short_url = url_module.short(article_url) if short_url: short_url = short_url[0][1][:-1] else: short_url = article_url response = site_name_effect + " %s \x02%s\x02" % (entry.title, short_url) if entry.updated: response += " - %s" % (entry.updated) jenni.msg(feed_channel, response) t = (feed_channel, feed_site_name, entry.title, article_url,) cursor_recent.execute("INSERT INTO recent VALUES (?, ?, ?, ?)", t) conn_recent.commit() cursor_recent.close() conn.commit() c.close() else: if DEBUG: jenni.msg(feed_channel, u"Skipping previously read entry: %s %s" % (site_name_effect, entry.title))