def refresh_cache(inp):
    """Refill the module-level e621 post cache for the given query.

    Expands the rating shorthand words (explicit/safe/nsfw/sfw) to the
    booru's ``rating:`` filter syntax, fetches up to 10 posts, and
    stores (id, score, url, rating, tags) tuples in the shuffled
    global ``cache``.  Also records the expanded query in the global
    ``lastsearch``.
    """
    print("[+] refreshing furry cache")  # parenthesized form works on py2 and py3
    global cache
    global lastsearch
    cache = []
    # These are special search queries in the booru.  Replace whole
    # tokens only: a blind str.replace first turns 'nsfw' into
    # 'rating:nsfw' and then the 'sfw' pass mangles that into
    # 'rating:nrating:sfw'.
    special = ('explicit', 'safe', 'nsfw', 'sfw')
    search = ' '.join(
        'rating:' + word if word in special else word
        for word in inp.split())
    lastsearch = search
    if inp == '':
        postjson = request.get_json('http://e621.net/posts.json?limit=10')
    else:
        postjson = request.get_json(
            'http://e621.net/posts.json?limit=10&tags={}'.format(
                request.urlencode(search)))
    # iterate posts directly instead of indexing with range(len(...))
    for post in postjson["posts"]:
        cache.append((
            post["id"],
            post["score"]["total"],
            post["file"]["url"],
            post["rating"],
            ", ".join(post["tags"]["general"]),
        ))
    random.shuffle(cache)
    return
def bible(inp, bot=None):
    """bible <passage> -- gets <passage> from the Bible (ESV)"""
    api_key = bot.config['api_keys'].get('english_bible', None)
    if api_key is None:
        return 'Bible error: no API key configured'
    url = "https://api.esv.org/v3/passage/text/?q=" + request.urlencode(inp)
    json = request.get_json(url, headers={"Authorization": "Token " + api_key})
    # the ESV API reports failures in a 'detail' field
    if 'detail' in json:
        return 'Bible error (lol): ' + json['detail']
    if 'passages' in json and len(json['passages']) == 0:
        return '[Bible] Not found'
    # assemble "[Bible] <canonical>: <text>" from whichever parts exist
    pieces = ['[Bible]']
    if 'canonical' in json:
        pieces.append('\x02' + json['canonical'] + '\x02:')
    if 'passages' in json:
        pieces.append(compress_whitespace('. '.join(json['passages'])))
    output = ' '.join(pieces)
    # keep the reply IRC-line sized
    if len(output) > 320:
        output = output[:320] + '...'
    return output
def search(input):
    """Query the definition API for *input* and return formatted entries.

    Returns a list of IRC-formatted strings, one per definition, or a
    one-element error list when the server response is missing or
    reports an error.
    """
    json = request.get_json(base_url + request.urlencode(input))
    if json is None or "error" in json or "errors" in json:
        return ["the server f****d up"]
    data = []
    for item in json['list']:
        definition = item['definition']
        word = item['word']
        example = item['example']
        output = '\x02' + word + '\x02 '
        # Net vote count; positive totals get an explicit '+' prefix.
        try:
            votes = int(item['thumbs_up']) - int(item['thumbs_down'])
            if votes > 0:
                votes = '+' + str(votes)
        except (TypeError, ValueError):
            # narrowed from a bare except: only malformed vote numbers
            votes = 0
        if votes != 0:
            output = output + '(' + str(votes) + ') '
        output = output + clean_text(definition)
        if example:
            output = output + ' \x02Example:\x02 ' + clean_text(example)
        data.append(output)
    return data
def board(inp):
    "board <board> <regex> -- Search all the posts on a board and return matching results"
    join_timeout_seconds = 10
    matches = deque()  # thread-safe append target shared by the workers
    args = inp.split(" ")
    board_name = args[0]
    pattern = " ".join(args[1:])
    threads_json = request.get_json(
        "https://a.4cdn.org/{0}/threads.json".format(board_name))
    specifics = {
        "sections": ["com", "name", "trip", "email", "sub", "filename"],
        "board": board_name,
        "string": pattern,
    }
    # fan out one worker thread per catalog thread
    workers = []
    for page in threads_json:
        for thread in page["threads"]:
            worker = Thread(None, target=search_thread,
                            args=(matches, thread["no"], specifics))
            worker.start()
            workers.append(worker)
    # give each straggler a bounded time to finish
    for worker in workers:
        if worker.is_alive():
            worker.join(float(join_timeout_seconds))
    results = process_results(board_name, pattern, matches)
    return "%s" % (results)
def gb_refresh_cache(inp):
    """Refill the module-level gelbooru post cache for the given query.

    Joins tags with '+', expands the rating shorthand words to the
    API's ``rating:`` filters, fetches up to 20 posts, and stores
    (id, score, file_url, rating, tags) tuples in the shuffled global
    ``gelbooru_cache``.
    """
    global gelbooru_cache
    gelbooru_cache = []
    search = (inp.replace(' ', '+')
                 .replace('explicit', 'rating:explicit')
                 .replace('nsfw', 'rating:explicit')
                 .replace('safe', 'rating:safe')
                 .replace('sfw', 'rating:safe'))
    posts = request.get_json(
        u'https://gelbooru.com/index.php?page=dapi&s=post&q=index&limit=20&json=1',
        params={'tags': search})
    # iterate directly instead of the original manual while/index counter
    for post in posts:
        gelbooru_cache.append((
            post.get('id'),
            post.get('score'),
            post.get('file_url'),
            post.get('rating'),
            post.get('tags'),
        ))
    random.shuffle(gelbooru_cache)
    return
def get_post(booru_id, tags=''):
    """Return a cached post for the booru/tags pair, filling the cache
    from the booru API on a miss.  Returns None when the API yields
    nothing usable."""
    cache_key = booru_id + (' ' + tags if tags else ' most recent')
    # if cache has data for this search, then just use that
    if cache_key_exists(cache_key):
        return cache_get_item(cache_key)
    # cache miss: download from the booru API and populate the cache
    api = boorus[booru_id]['url'] + boorus[booru_id]['api']
    if tags:
        json = request.get_json(api, params={'limit': 20, 'tags': tags})
    else:
        json = request.get_json(api, params={'limit': 80})
    if len(json) == 0:
        return None
    for item in json:
        # skip pixiv, all direct links are "403 denied"
        if 'pixiv_id' in item and 'file_url' not in item:
            continue
        image = {
            'id': item['id'],
            'file_url': item['file_url'],
            'file_size': item['file_size'],
            'rating': item.get('rating', 'e'),
            'score': item.get('score', 0),
            'tags': item.get('tags', item.get('tag_string', 'unknown'))
        }
        # some boorus return site-relative file paths
        if not image['file_url'].startswith('http'):
            image['file_url'] = boorus[booru_id]['url'] + image['file_url']
        cache_append_item(cache_key, image)
    # yes, check again -- the fetch may have added nothing at all
    if cache_key_exists(cache_key):
        return cache_get_item(cache_key)
    return None
def search(instance, query):
    """Run an opensearch-style query against a configured wiki.

    Returns (titles, descriptions, urls) from the response, or None
    when *instance* is unknown."""
    if instance not in INSTANCES:
        return
    wiki = INSTANCES[instance]
    response = request.get_json(wiki['search'] + request.urlencode(query))
    # opensearch responses are [query, titles, descriptions, urls]
    return (response[1], response[2], response[3])
def get_more_detail(api_path, gid):
    """Fetch live in-game detail (count, outs, runners, matchup) for one game.

    NOTE(review): this function *returns* Exception instances instead of
    raising them -- callers (see mlb()) check isinstance(result, Exception).
    Returns a dict with keys balls/strikes/outs/onbase/pitcher/batter/latest
    on success; 'latest' may itself hold an Exception if only the event log
    fetch failed.
    """
    # gid like "2019/06/01/xxx-yyy" becomes "gid_2019_06_01_xxx_yyy"
    api_gid = 'gid_{}'.format(gid.replace('/', '_').replace('-', '_'))
    detail_linescore = '{}/{}/linescore.json'.format(api_path, api_gid)
    detail_eventlog = '{}/{}/eventLog.xml'.format(api_path, api_gid)
    try:
        linescore = get_json(detail_linescore)
    except Exception as e:
        return e
    if not isinstance(linescore, dict):
        return Exception('linescore is not an object')
    try:
        linescore = linescore['data']['game']
    except KeyError:
        return Exception('linescore structure is unexpected')
    # count
    balls = linescore.get('balls', 'unkn')
    strikes = linescore.get('strikes', 'unkn')
    outs = linescore.get('outs', 'unkn')
    runners_onbase = linescore.get('runner_on_base_status', 'unkn')
    pitcher = linescore.get('current_pitcher', dict()).get('last_name', 'unkn')
    batter = linescore.get('current_batter', dict()).get('last_name', 'unkn')
    # bonus: scrape the latest play description from the XML event log.
    # Events carry a 'number' attribute; keep the description of the
    # highest-numbered event with non-empty text.
    latest_event = ''
    try:
        events_xml = BeautifulSoup(get(detail_eventlog))
        events = events_xml.find_all('event')
        maxval = -999  # sentinel below any real event number
        for event in events:
            if int(event['number']) > maxval and event['description'] != '':
                maxval = int(event['number'])
                latest_event = event['description']
    except Exception as e:
        # best-effort: the caller distinguishes this via isinstance
        latest_event = e
    return {'balls': balls, 'strikes': strikes, 'outs': outs,
            'onbase': runners_onbase, 'pitcher': pitcher, 'batter': batter,
            'latest': latest_event}
def image(inp, bot=None):
    """image <query> -- Returns the first Google Image result for <query>."""
    # A plain unicode input is a bare query; otherwise inp looks like a
    # command match whose .string is ".<query>.<filetype>" -- TODO confirm
    # against the caller.
    if isinstance(inp, unicode):  # isinstance instead of type() comparison
        filetype = None
    else:
        inp, filetype = inp.string[1:].split('.')
    cx = bot.config['api_keys']['googleimage']
    search = '+'.join(inp.split())
    key = bot.config['api_keys']['google']
    # Build the request once; the fileType restriction is optional.
    url = API_URL + u'?key={}&cx={}&searchType=image&num=1&safe=off&q={}'
    args = [key, cx, search.encode('utf-8')]
    if filetype:
        url += u'&fileType={}'
        args.append(filetype)
    result = request.get_json(url.format(*args))['items'][0]['link']
    try:
        return web.isgd(result)
    except Exception as e:
        # single-argument print() keeps py2 output identical and is valid py3
        print('[!] Error while shortening: {}'.format(e))
    # fall back to the unshortened link
    return result
def google(inp, bot=None):
    """google <query> -- Returns first google search result for <query>."""
    query = request.urlencode(inp)
    cx = bot.config['api_keys']['googleimage']
    key = bot.config['api_keys']['google']
    terms = '+'.join(query.split())
    url = API_URL + u'?key={}&cx={}&num=1&safe=off&q={}'
    result = request.get_json(
        url.format(key, cx, terms.encode('utf-8')))['items'][0]
    title = result['title']
    content = formatting.remove_newlines(result['snippet'])
    link = result['link']
    # prefer a shortened link; fall back to the raw one on any failure
    try:
        short = web.isgd(link)
    except Exception:
        short = link
    return u'{} -- \x02{}\x02: "{}"'.format(short, title, content)
def search_thread(results_deque, thread_num, search_specifics):
    """
    Searches every post in thread thread_num on the given board for the
    regex in search_specifics["string"].  Appends matching
    "<thread>#p<post>" references to results_deque (the original
    docstring wrongly claimed a list return value).
    """
    json_url = "https://a.4cdn.org/{0}/thread/{1}.json".format(
        search_specifics["board"], thread_num)
    thread_json = request.get_json(json_url)
    if thread_json is None:
        return
    for post in thread_json["posts"]:
        # concatenate every searchable section present on the post
        user_text = "".join(
            post[s] for s in search_specifics["sections"] if s in post)
        # combine regex flags with | (bitwise OR), not arithmetic +
        if re.search(search_specifics["string"], user_text,
                     re.UNICODE | re.IGNORECASE) is not None:
            results_deque.append("{0}#p{1}".format(thread_num, post["no"]))
def info(id):
    """Return a one-line formatted summary for a vimeo video id, or
    None when the API returns nothing."""
    data = request.get_json('http://vimeo.com/api/v2/video/' + id + '.json')
    if not data or len(data) == 0:
        return
    video = data[0]
    length = timeformat.format_time(video["duration"], simple=True)
    likes = format(video['stats_number_of_likes'], ',d')   # thousands separators
    views = format(video['stats_number_of_plays'], ',d')
    parts = [
        '\x02' + video['title'] + '\x02',
        'length \x02' + length + '\x02',
        likes + ' likes',
        views + ' views',
        '\x02' + video['user_name'] + '\x02 on ' + video['upload_date'],
    ]
    return ' - '.join(parts)
def parse_ip(ip):
    """Look up *ip* on ipinfo.io and return a one-line summary.

    Returns "[IP] <org> - <city>, <region>, <country>" on success, or
    an error string for invalid IPs / API failures.
    """
    ip = request.urlencode(ip)
    data = request.get_json('https://ipinfo.io/' + ip,
                            headers={'Accept': 'application/json'})
    if data.get('error') is not None:
        if data['error'].get('title') == 'Wrong ip':
            return '[IP] That IP is not valid'
        return '[IP] Some error occurred'  # fixed typo: was "ocurred"
    # other fields available for, e.g., 8.8.8.8 (unused here):
    #   loc: "37.40,-122.07", hostname: dns.google, timezone (unreliable)
    city = data.get('city')        # Mountain View
    country = data.get('country')  # US
    region = data.get('region')    # California
    org = data.get('org')          # Google LLC
    return u"[IP] {} - {}, {}, {}".format(org, city, region, country)
def gelbooru_url(match):
    """Return a formatted summary line for the gelbooru post id
    captured in the regex match."""
    posts = request.get_json(
        'https://gelbooru.me/index.php?page=dapi&s=post&q=index&limit=1&id={}&json=1'
        .format(match.group(1)))
    post = posts[0]
    post_id = post.get('id')
    score = post.get('score')
    file_url = post.get('file_url')
    rating = post.get('rating')
    tags = post.get('tags')
    # map the API's one-letter rating codes to colored IRC labels;
    # anything unrecognized passes through unchanged
    labels = {
        'e': "\x02\x034NSFW\x03\x02",
        'q': "\x02\x037Questionable\x03\x02",
        's': "\x02\x033Safe\x03\x02",
    }
    rating = labels.get(rating, rating)
    return u'\x02[{}]\x02 Score: \x02{}\x02 - Rating: {} - {} - {}'.format(
        post_id, score, rating, file_url, tags[:75].strip())
def radio(id):
    """Report what a configured web radio is currently playing."""
    if id not in radios:
        return "we dont support that radio. try one of the following: " + ", ".join(
            radios.keys())
    station = radios[id]
    try:
        data = request.get_json(station['api'])
    except ValueError:
        return "the radio " + id + " has some server issues right now. try again later"
    sources = data.get('icestats', {}).get('source', False)
    if sources is False:
        return "the radio " + id + " is offline"

    def describe(source):
        # one status line for a single icecast source
        title = source.get('title', 'Untitled')
        listeners = source.get('listeners', 0)
        return u'{} is playing \x02{}\x02 for {} listeners. listen: {}'.format(
            id, title, listeners, station['homepage'])

    # the icecast api returns either one object (for one stream)
    # or a list of sources (for multiple streams available)
    if isinstance(sources, dict):
        sources = [sources]
    if isinstance(sources, list):
        for source in sources:
            if source.get('listenurl', '').endswith(station['source']):
                return describe(source)
    # didn't find the configured stream among the sources
    return "the radio " + id + " is offline"
def consume_api(id):
    """Fetch and return the JSON payload for the given resource id."""
    return request.get_json(base_url + id + '?' + query_string)
def mlb(inp, say=None):
    """mlb scoreboard via the (deprecated) gd2 grid.json API.

    If *inp* names a team playing today, returns a detailed line for
    that game (count/outs/runners/matchup), possibly emitting the
    latest play via say() instead of returning.  Otherwise returns a
    summary line of all of today's games.
    """
    api_base = '{}/{}'.format(MLB_DEPRECATED_API, get_api_time_of_day())
    api_string = '{}/grid.json'.format(api_base)
    try:
        games_today = get_json(api_string)
    except Exception:
        return 'Failed to get games today (Note: gd2 API *is* deprecated).'
    if not isinstance(games_today, dict):
        return 'Failed to get games today: grid.json is not an object.'
    try:
        games = games_today['data']['games']['game']
    except KeyError:
        return 'No Games Today.'
    # a single game comes back as a bare object, not a one-element list
    if not isinstance(games, list):
        games = [games]
    output = []
    for game in games:
        away_team = game.get('away_name_abbrev', '')
        away_score = game.get('away_score', '0')
        if away_score == '':
            away_score = 0
        home_team = game.get('home_name_abbrev', '')
        home_score = game.get('home_score', '0')
        if home_score == '':
            home_score = 0
        # top_inning Y/N -> ^ / v arrows; anything else shows '-'
        inning = game.get('top_inning', '-')
        if inning == 'Y':
            inning = '^'
        elif inning == 'N':
            inning = 'v'
        else:
            inning = '-'
        game_status = game.get('status', '')
        if 'Pre' == game_status[0:3]:
            # pregame: show the scheduled start time instead of an inning
            game_status = game.get('event_time', 'P')
            inning = ''
        elif 'Final' == game_status:
            game_status = 'F'
            inning = ''
        else:
            # in progress: show the inning number
            game_status = game.get('inning', '0')
        outstring = OUTGAME_STRING.format(away_team, away_score, game_status,
                                          inning, home_team, home_score)
        if inp.lower() == away_team.lower() or inp.lower() == home_team.lower():
            # caller asked about this specific game; add live detail if
            # it is in progress (inning is '' for pregame/final)
            if inning != '':
                # get_more_detail returns Exception instances on failure
                details = get_more_detail(api_base, game.get('id', 'null'))
                if isinstance(details, Exception):
                    print 'WARNING: API may be broken: {}'.format(details)
                    return outstring
                outstring += ' Count: {}-{}'.format(details['balls'], details['strikes'])
                outstring += ' Outs: {}'.format(details['outs'])
                outstring += ' OnBase: {}'.format(details['onbase'])
                outstring += ' Pitcher: {}'.format(details['pitcher'])
                outstring += ' Batter: {}'.format(details['batter'])
                if isinstance(details['latest'], Exception):
                    print 'WARNING: API For latest events is broken: {}'.format(details['latest'])
                elif details['latest'] != "":
                    # two-line reply via say(); return None in that case
                    say(outstring)
                    say('Latest: {}'.format(details['latest']))
                    return
            return outstring
        else:
            output.append(outstring)
    if len(output) == 0:
        return 'No Games Today.'
    else:
        return 'Time in EST: ' + ', '.join(output)