def search(board, search):
    res = []
    catalog = json.load(
        urllib.urlopen('https://a.4cdn.org/%s/catalog.json' % board))
    for i in catalog:
        for j in i['threads']:
            if search.lower() in j.get('sub', '').lower() or \
                    search.lower() in j.get('com', '').lower():
                subject = j.get('sub', 'No subject')
                subject = unescape(subject)
                post = j.get('com', 'Empty post')
                post = format(post)
                if len(post) > 100:
                    # close color here also
                    post = post[:100] + '...'
                boardLink = 'https://boards.4chan.org/%s/thread/%s' % (board, j['no'])
                if subject == 'No subject':
                    text = u'/%s/ · %s · %s (R:%s, I:%s)' % \
                        (board, post, boardLink, j['replies'], j['images'])
                else:
                    text = u'/%s/ · %s · %s · %s (R:%s, I:%s)' % \
                        (board, subject, post, boardLink, j['replies'], j['images'])
                res.append(text)
    return res
def print_me(self, print_summary=False, seen_as_new=False):
    title = self.entry.title
    link = self.entry.link
    try:
        summary = unescape(strip(self.entry.summary))
    except AttributeError:
        summary = ""
    if 'published' in self.entry.keys():
        published = self.entry.published.rsplit(' ', 1)[0]
    else:
        published = None
    if seen_as_new:
        str_new = " \x02new!\x02 "
    else:
        str_new = " "
    if published is not None:
        print_console("%s%s%s - %s (%s)" % (self.feed.logo, str_new, title, link, published))
    else:
        print_console("%s%s%s - %s" % (self.feed.logo, str_new, title, link))
    if print_summary:
        for l in summary.split("\n"):
            if len(l) > 0:
                print_console("%s %s" % (self.feed.logo, l))
def getTweet(user, n):
    now = int(time.time())
    t_logo = "0,10twitter"
    try:
        t_api = pytwitter.Api(consumer_key='laKcPz3kAAH3TVz8wIRAA',
                              consumer_secret='P7CD74v1ea5dO9JvJvv0blAmZaGmhQebAJIH2XLCI',
                              access_token_key='1523563723-gcn8yyeFiGK1PlxfnoPve9j0QWO3OVP2qyfhTCs',
                              access_token_secret='QihKi7KCPFD7n9Yq3AFXDgWVc2vO3dmlzhClgsDxrU0')
        t_user = t_api.GetUser(None, user)._screen_name
        tweets = t_api.GetUserTimeline(screen_name=t_user, count=200)
        if not tweets:
            return "%s User: %s has no tweets" % (t_logo, t_user)
        else:
            tweet = tweets[n].GetText()
            t = int(tweets[n].GetCreatedAtInSeconds())
            created = "Posted %s ago" % (datetime.timedelta(seconds=now - t))
            tweet = unescape(tweet).replace('\n', ' ')
            return "%s @%s: %s (%s)" % (t_logo, t_user, tweet, created)
    except pytwitter.TwitterError as e:
        myprint("TwitterError: %s" % (e))
        return "%s Error: Nope." % (t_logo)
    except IndexError:
        return "%s Error: You have gone too far (keep below 200)" % (t_logo)
def search(board, search):
    res = []
    try:
        catalog = json.load(urllib.urlopen('https://a.4cdn.org/%s/catalog.json' % board))
        for i in catalog:
            for j in i['threads']:
                if search.lower() in j.get('sub', '').lower() or \
                        search.lower() in j.get('com', '').lower():
                    subject = j.get('sub', 'Empty subject')
                    subject = unescape(subject)
                    post = j.get('com', 'Empty post')
                    post = format(post)
                    if len(post) > 100:
                        # close color here also
                        post = post[0:100] + '...'
                    boardLink = 'https://boards.4chan.org/%s/thread/%s' % (board, j['no'])
                    text = '%s /%s/ | %s | %s | %s (R:%s, I:%s)' % \
                        (logo, board, subject, post, boardLink, j['replies'], j['images'])
                    res.append(text)
        return res
    except IOError:
        return ['%s Error: Try again later' % logo]
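
# Usage sketch (not part of the original scripts): one plausible way to drive
# the catalog search above, assuming Python 2, its urllib/json modules, a
# module-level logo string, and the unescape()/format() helpers defined in
# these files. The board 'g' and the query 'thinkpad' are placeholder values.
if __name__ == '__main__':
    for line in search('g', 'thinkpad'):
        print line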
def print_me(self, print_summary=False, seen_as_new=False):
    title = self.entry.title
    link = self.entry.link
    try:
        summary = unescape(strip(self.entry.summary))
    except AttributeError:
        summary = ""
    if 'published' in self.entry.keys():
        published = self.entry.published.rsplit(' ', 1)[0]
    else:
        published = None
    if seen_as_new:
        str_new = " \x02new!\x02 "
    else:
        str_new = " "
    if published is not None:
        print_console("%s%s%s - \x1f%s\x1f (%s)" % (self.feed.logo, str_new, title, link, published))
    else:
        print_console("%s%s%s - %s" % (self.feed.logo, str_new, title, link))
    if print_summary:
        for l in summary.split("\n"):
            l = l.strip()
            if len(l) > 0:
                print_console("%s %s" % (self.feed.logo, l))
def search(query, n):
    try:
        results = t_api.GetSearch(term=query, result_type="recent", count=15)
        if not results:
            return "%s No results for %s" % (t_logo, query)
        else:
            user = results[n].GetUser()._screen_name
            tweet = unescape(results[n].GetText()).replace('\n', ' ')
            return "%s @%s: %s" % (t_logo, user, tweet)
    except IndexError:
        return "%s Error: YOU'VE GONE TOO FAR (keep below 15)" % (t_logo)
def search(query, n):
    try:
        results = t.GetSearch(term=query, result_type="recent", count=15)
        if not results:
            print_console("%s No results for %s" % (L, query))
            exit(-1)
        else:
            user = results[n].GetUser()._screen_name
            tweet = unescape(results[n].GetText()).replace('\n', ' ')
            print_console("%s @%s: %s" % (L, user, tweet))
    except IndexError:
        print_console("%s Error: YOU'VE GONE TOO FAR (keep below 15)" % (L))
        exit(-1)
def format(comment):
    comment = unescape(comment)
    comment = comment.replace('<br>', ' ')
    comment = comment.replace('<wbr>', '')
    # greentext open
    comment = comment.replace('<span class="quote">', '3')
    comment = comment.replace('<span class="deadlink">', '3')
    # close color
    comment = comment.replace('</span>', '')
    # remove the rest of html tags
    comment = strip(comment)
    return comment
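
# Quick check (assumption, not original code): format() above is the HTML
# cleanup pass the 4chan helpers run on a post's 'com' field before truncating
# it; it only works if unescape() and strip() from mylib are in scope. The
# sample string is made up, and the exact output depends on those helpers.
if __name__ == '__main__':
    sample = 'Check this<br><span class="quote">&gt;implying</span><wbr>'
    print format(sample)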
def getTweet(user, n):
    try:
        username = t.GetUser(None, user)._screen_name
        tweets = t.GetUserTimeline(None, screen_name=username, count=100)
        if not tweets:
            print_console("%s User: %s has no tweets" % (L, username))
            exit(-1)
        else:
            tweet = tweets[n].GetText()
            tweet = unescape(tweet).replace('\n', ' ')
            print_console("%s @%s: %s" % (L, username, tweet))
    except IndexError:
        print_console("%s Error: YOU'VE GONE TOO FAR (keep below 100)" % (L))
        exit(-1)
def getThreadInfo(board, threadNo):
    info = json.load(urllib.urlopen('https://a.4cdn.org/%s/thread/%s.json' % (board, threadNo)))
    op = info['posts'][0]
    name = op.get('name', 'Anonymous')
    subject = op.get('sub', 'Empty subject')
    subject = unescape(subject)
    post = op.get('com', 'Empty post')
    post = format(post)
    if len(post) > 100:
        # close color here also
        post = post[0:100] + '...'
    return "%s /%s/ | %s | %s | %s | (R:%s, I:%s)" % \
        (logo, board, name, subject, post, op['replies'], op['images'])
def webSearch(terms):
    g_baseURL = "http://ajax.googleapis.com/ajax/services/search/web?v=1.0&"
    query = urllib.urlencode({'q': terms})
    data = urllib.urlopen(g_baseURL + query).read()
    j = json.loads(data)
    res = j['responseData']['results']
    output = []
    for i in res:
        title = i['title'].replace("<b>", "").replace("</b>", "")
        title = unescape(title)
        url = i['url']
        url = urllib.unquote(url)
        string = "%s: %s ( %s )" % (g_logo, title, url)
        output.append(string)
    return output
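
# Usage sketch (not from the original file): webSearch() above assumes Python 2
# urllib/json, a module-level g_logo prefix, and unescape() from mylib; the
# search term here is a placeholder.
if __name__ == '__main__':
    for line in webSearch('openbsd'):
        print line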
def getTweet(user, n):
    now = int(time.time())
    try:
        t_user = t_api.GetUser(None, user)._screen_name
        tweets = t_api.GetUserTimeline(screen_name=t_user, count=200)
        if not tweets:
            return "%s User: %s has no tweets" % (t_logo, t_user)
        else:
            tweet = tweets[n].GetText()
            t = int(tweets[n].GetCreatedAtInSeconds())
            created = "Posted %s ago" % (datetime.timedelta(seconds=now - t))
            tweet = unescape(tweet).replace('\n', ' ')
            return "%s @%s: %s (%s)" % (t_logo, t_user, tweet, created)
    except pytwitter.TwitterError as e:
        myprint("TwitterError: %s" % (e))
        return "%s Error: Nope." % (t_logo)
    except IndexError:
        return "%s Error: You have gone too far (keep below 200)" % (t_logo)
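
# Setup sketch (assumption, not original code): getTweet() above expects a
# module-level pytwitter client plus t_logo, with unescape() and myprint()
# from mylib in scope. The credential values below are placeholders, not
# real keys.
import time
import datetime
import pytwitter

t_logo = "0,10twitter"
t_api = pytwitter.Api(consumer_key='<consumer_key>',
                      consumer_secret='<consumer_secret>',
                      access_token_key='<access_token_key>',
                      access_token_secret='<access_token_secret>')
# print getTweet('some_account', 0)   # most recent tweet, once keys are filled in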
import os
import sys
import urllib
import json as m_json

# ../mylib.py
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from mylib import print_console, unescape, strip

logo = "2G4o8o2g3l4e"

if len(sys.argv) < 2:
    print_console("%s search syntax: !google <terms>" % logo)
    exit(-1)

query = " ".join(sys.argv[1:])
string = "%s %s: " % (logo, query)
query = urllib.urlencode({'q': query})
response = urllib.urlopen('http://ajax.googleapis.com/ajax/services/search/web?v=1.0&' + query).read()
json = m_json.loads(response)
results = json['responseData']['results']

i = 0
for result in results[:3]:
    i += 1
    title = strip(unescape(result['title'].replace("<b>", "").replace("</b>", ""))).encode("utf-8")
    url = urllib.unquote(result['url']).encode("utf-8")  # was URL in the original and that threw a NameError
    string = string + "%s (%s); " % (title, url)

if i > 0:
    # string already carries the query plus each "title (url)" pair
    print_console(string)
else:
    print_console("No results!")
print_console("Board %s: Not Found!" % b) else: print_console("Feed %s: Not Found" % logo) if f.bozo == 1: print_console("%s omg :( %s" % (f.bozo, f.bozo_exception)) exit(-1) try: entry = f.entries[n] except IndexError: print_console("Entry not available") exit(-1) title = entry.title link = entry.link summary = unescape(strip(entry.summary)) if "published" in entry.keys(): published = entry.published.rsplit(" ", 1)[0] else: published = None if published is not None: print_console("%s %s - %s (%s)" % (logo, title, link, published)) else: print_console("%s %s - %s" % (logo, title, link)) for l in summary.split("\n"): if len(l) > 0: print_console("%s %s" % (logo, l))
import urllib
import sys
import code
import json as m_json

from mylib import print_console, unescape, strip

logo = "2G4o8o2g3l4e"

if len(sys.argv) < 2:
    print_console("%s search syntax: !google <terms>" % logo)
    exit(-1)

query = " ".join(sys.argv[1:])
print_console("%s search: %s" % (logo, query))
query = urllib.urlencode({'q': query})
response = urllib.urlopen('http://ajax.googleapis.com/ajax/services/search/web?v=1.0&' + query).read()
json = m_json.loads(response)
results = json['responseData']['results']

n = 0
for result in results:
    title = strip(unescape(result['title'].replace("<b>", "").replace("</b>", "")))
    url = result['url']  # was URL in the original and that threw a NameError
    print_console("%s: %s" % (title, url))
print_console("Board %s: Not Found!" % b) else: print_console("Feed %s: Not Found" % logo) if f.bozo == 1: print_console("%s omg :( %s" % (f.bozo, f.bozo_exception)) exit(-1) try: entry = f.entries[n] except IndexError: print_console("Entry not available") exit(-1) title = entry.title link = entry.link summary = unescape(strip(entry.summary)) if 'published' in entry.keys(): published = entry.published.rsplit(' ', 1)[0] else: published = None if published is not None: print_console("%s %s - %s (%s)" % (logo, title, link, published)) else: print_console("%s %s - %s" % (logo, title, link)) for l in summary.split("\n"): if len(l) > 0: print_console("%s %s" % (logo, l))