def handle(send, msg, args): """ Get titles for urls. | Generate a short url. | Get the page title. """ #FIXME: don't hardcode. if "http://git.io" in msg: return # crazy regex to match urls match = re.search(r"""(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.] [a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s() <>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))* \)|[^\s`!()\[\]{};:'\".,<>?....]))""", msg) if match: url = match.group(1) title = get_title(url) short = get_short(url) last = args['db'].query(Urls).filter(Urls.url == url).order_by(Urls.time.desc()).first() if args['config']['feature'].getboolean('linkread'): if last: lasttime = strftime('at %H:%M:%S on %Y-%m-%d', localtime(last.time)) send("Url %s previously posted %s by %s -- %s" % (short, lasttime, last.nick, title)) else: send('** %s - %s' % (title, short)) args['db'].add(Urls(url=url, title=title, nick=args['nick'], time=time()))
def handle(send, msg, args):
    """ Get titles for urls. | Generate a short url. | Get the page title. """
    # FIXME: don't hardcode.
    if "http://git.io" in msg:
        return
    # crazy regex to match urls
    url_match = re.search(
        r"""(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.] [a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s() <>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))* \)|[^\s`!()\[\]{};:'\".,<>?....]))""",
        msg)
    if not url_match:
        return
    url = url_match.group(1)
    title = get_title(url)
    short = get_short(url)
    # Look up the most recent previous posting of this url, if any.
    previous = args['db'].query(Urls).filter(Urls.url == url).order_by(
        Urls.time.desc()).first()
    if args['config']['feature'].getboolean('linkread'):
        if not previous:
            send('** %s - %s' % (title, short))
        else:
            posted = strftime('at %H:%M:%S on %Y-%m-%d', localtime(previous.time))
            send("Url %s previously posted %s by %s -- %s" % (short, posted, previous.nick, title))
    # Always remember the url for future duplicate detection.
    args['db'].add(Urls(url=url, title=title, nick=args['nick'], time=time()))
def handle(send, msg, args):
    """ Get titles for urls. | Generate a short url. | Get the page title. """
    # Run the url regex in a worker pool so a pathological message
    # can't hang the main thread on catastrophic backtracking.
    worker = args['handler'].workers
    result = worker.run_pool(get_urls, [msg])
    try:
        # Wait at most 5 seconds for the regex to finish.
        urls = result.get(5)
    except multiprocessing.TimeoutError:
        worker.restart_pool()
        send('Url regex timed out.')
        return
    for url in urls:
        # Prevent botloops: if this url was posted within the last 10 seconds,
        # bail out. NOTE(review): `return` also skips any remaining urls in
        # this message, not just this one — confirm that's intended.
        if args['db'].query(Urls).filter(Urls.url == url, Urls.time > time.time() - 10).count():
            return
        title = urlutils.get_title(url)
        key = args['config']['api']['googleapikey']
        short = urlutils.get_short(url, key)
        # Most recent prior posting of this url, if any.
        last = args['db'].query(Urls).filter(Urls.url == url).order_by(Urls.time.desc()).first()
        if args['config']['feature'].getboolean('linkread'):
            # 604800 is the number of seconds in a week.
            if last and (time.time() - last.time) < 604800:
                lasttime = time.strftime('at %H:%M:%S on %Y-%m-%d', time.localtime(last.time))
                send("Url %s previously posted %s by %s -- %s" % (short, lasttime, last.nick, title))
            else:
                send('** %s - %s' % (title, short))
        # Record the url regardless of whether linkread announced it.
        args['db'].add(Urls(url=url, title=title, nick=args['nick'], time=time.time()))
def handle(send, msg, args): """ Get titles for urls. | Generate a short url. | Get the page title. """ # FIXME: don't hardcode. if "http://git.io" in msg: return #FIXME: also, don't hardcode. if "polr" in msg and "http" in msg: return # crazy regex to match urls match = re.search(r"""(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.] [a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s() <>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))* \)|[^\s`!()\[\]{};:'\".,<>?....]))""", msg) if match: url = match.group(1) if "!" + url in msg: return title = get_title(url) short = get_short(url, polr.api(apikey = args['config']['api']['polrkey'])) if args['config']['feature'].getboolean('linkread'): send('** %s - %s' % (title, short))
def handle(send, msg, args):
    """ Get titles for urls. | Generate a short url. | Get the page title. """
    # Offload the url regex to a worker pool; a pathological message must not
    # be able to block the bot indefinitely.
    pool = args['handler'].workers
    async_result = pool.run_pool(get_urls, [msg])
    try:
        found_urls = async_result.get(5)
    except multiprocessing.TimeoutError:
        pool.restart_pool()
        send('Url regex timed out.')
        return
    db = args['db']
    for url in found_urls:
        # Prevent botloops
        recently_posted = db.query(Urls).filter(Urls.url == url, Urls.time > time.time() - 10).count()
        if recently_posted:
            return
        title = urlutils.get_title(url)
        key = args['config']['api']['googleapikey']
        short = urlutils.get_short(url, key)
        previous = db.query(Urls).filter(Urls.url == url).order_by(
            Urls.time.desc()).first()
        if args['config']['feature'].getboolean('linkread'):
            # 604800 is the number of seconds in a week.
            repost_window = 604800
            if previous and (time.time() - previous.time) < repost_window:
                when = time.strftime('at %H:%M:%S on %Y-%m-%d', time.localtime(previous.time))
                send("Url %s previously posted %s by %s -- %s" % (short, when, previous.nick, title))
            else:
                send('** %s - %s' % (title, short))
        db.add(Urls(url=url, title=title, nick=args['nick'], time=time.time()))
def cmd(send, msg, args):
    """Gets a random Reddit post.

    Syntax: {command} <subreddit>

    With no argument, picks a random post from the reddit frontpage.
    """
    # Easter egg: srepetsk is always redirected to /r/nottheonion.
    if args['name'] == 'srepetsk':
        msg = 'nottheonion'
    if msg and not check_exists(msg):
        # Fixed misspelling in the user-facing error message ("Non-existant").
        send("Non-existent subreddit.")
        return
    subreddit = '/r/%s' % msg if msg else ''
    # reddit's /random endpoint redirects to a random post; .url is the
    # final location after redirects.
    url = get('http://reddit.com%s/random' % subreddit, headers={'User-Agent': 'CslBot/1.0'}).url
    send('** %s - %s' % (get_title(url), get_short(url)))
def cmd(send, msg, args):
    """Gets a random Reddit post.

    Syntax: !reddit <subreddit>
    """
    # Easter egg: srepetsk always gets /r/nottheonion.
    target = "nottheonion" if args["name"] == "srepetsk" else msg
    if target and not check_exists(target):
        send("Non-existant subreddit.")
        return
    path = "/r/%s" % target if target else ""
    # The /random endpoint redirects to a random post; .url is the final location.
    response = get("http://reddit.com%s/random" % path, headers={"User-Agent": "CslBot/1.0"})
    landing = response.url
    send("** %s - %s" % (get_title(landing), get_short(landing)))
def cmd(send, msg, args):
    """Gets a random Reddit post.

    Syntax: !reddit <subreddit>
    """
    # Easter egg: srepetsk always gets /r/nottheonion.
    if args['name'] == 'srepetsk':
        msg = 'nottheonion'
    if msg and not check_exists(msg):
        send("Non-existant subreddit.")
        return
    subreddit = '/r/%s' % msg if msg else ''
    # Presumably the timestamp query string is a cache-buster so repeated
    # calls don't get the same cached redirect — TODO confirm.
    urlstr = 'http://reddit.com%s/random?%s' % (subreddit, time.time())
    url = get(urlstr, headers={'User-Agent': 'CslBot/1.0'}).url
    key = args['config']['api']['googleapikey']
    send('** %s - %s' % (get_title(url), get_short(url, key)))
def cmd(send, msg, args):
    """Googles something.

    Syntax: {command} <term>
    """
    if not msg:
        send("Google what?")
        return
    # Query the (legacy) Google AJAX search API.
    query = {'v': '1.0', 'q': msg}
    data = get('http://ajax.googleapis.com/ajax/services/search/web', params=query).json()
    results = data['responseData']['results']
    if not results:
        send("Google didn't say much.")
        return
    url = results[0]['unescapedUrl']
    title = get_title(url)
    # Truncate long titles so the output line stays a reasonable length.
    if len(title) > 128:
        title = title[:125] + "..."
    send("Google says %s (Title: %s)" % (url, title))
def random_post(subreddit, apikey):
    """ Gets a random post from a subreddit and returns a title and shortlink to it """
    # Presumably the timestamp query string defeats caching of the
    # /random redirect — TODO confirm.
    target = 'http://reddit.com%s/random?%s' % ('/r/' + subreddit, time.time())
    landing = get(target, headers={'User-Agent': 'CslBot/1.0'}).url
    return '** %s - %s' % (get_title(landing), get_short(landing, apikey))