def ltc(self):
    url = 'https://btc-e.com/api/2/ltc_usd/ticker'
    request = Browser(url)
    if not request:
        return "Couldn't retrieve LTC data."
    try:
        json = request.json()
    except:
        return "Couldn't parse LTC data."
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    last = locale.currency(json['ticker']['last'])
    low = locale.currency(json['ticker']['low'])
    high = locale.currency(json['ticker']['high'])
    if self.values:
        try:
            value = locale.currency(
                float(json['ticker']['last']) * float(self.values[0]))
        except:
            return "Couldn't compute LTC value."
        return 'Value of %s LTC is %s' % (self.values[0], value)
    else:
        return 'Litecoin, Last: %s, Low: %s, High: %s' % (last, low, high)
def g(self): if not self.values: return "Enter a search" # If values was a string you don't need the join/etc params = { 'v': '1.0', 'rsz': 'large', 'start': '0', 'q': "+".join(self.values) } try: request = Browser( 'http://ajax.googleapis.com/ajax/services/search/web', params=params) json = request.json() except: return "Something's buggered up" if len(json["responseData"]["results"]) == 0: return "No results" result = json["responseData"]["results"][0] title = result["titleNoFormatting"] link = result["unescapedUrl"] return "%s @ %s" % (title, link)
def g(self): if not self.values: return "Enter a search" key = self.config.google_search_key gid = self.config.google_search_id params = {'key': key, 'cx': gid, 'fields': 'items(title,link)', 'q': "+".join(self.values)} try: request = Browser( 'https://www.googleapis.com/customsearch/v1', params=params) if request.error: return request.error json = request.json() except Exception as e: return "Something's buggered up: %s" % str(e) try: if len(json["items"]) == 0: return "No results" except: return "Non-standard response: https://www.google.com/#q=%s" % '+'.join(self.values) result = json["items"][0] title = result["title"] link = result["link"] return "%s @ %s" % (title, link)
def excuse(self):
    try:
        html = Browser('http://developerexcuses.com')
    except:
        return 'You\'re on your own this time bud'
    parsed = html.soup()
    return parsed.a.text
def get_currency_price(self, name, source, dest='USD', has_gdax=False):
    """
    Retrieve the aggregated last, low and high prices of a crypto currency.
    """
    value_of = None
    if self.values:
        try:
            value_of = int(self.values[0])
        except:
            pass
    url = 'https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=%s&tsym=%s'
    request = Browser(url % (source, dest))
    if not request:
        return "Couldn't retrieve %s data." % source.upper()
    try:
        json = request.json()['Data']['AggregatedData']
    except:
        return "Couldn't parse %s data." % source.upper()
    last = float(json['PRICE'])
    low = float(json['LOW24HOUR'])
    high = float(json['HIGH24HOUR'])
    gdax = None
    if has_gdax:
        gdax = self.get_gdax_price(source, dest, value_of)
    if value_of:
        try:
            value = float(json['PRICE']) * float(value_of)
        except:
            return "Couldn't compute %s value." % source.upper()
        if gdax:
            gdax = ", GDAX: %s" % self.format_currency(gdax)
        return 'Value of %s %s is %s%s' % (value_of, source.upper(),
                                           self.format_currency(value),
                                           gdax if gdax else '')
    else:
        response = OrderedDict()
        response['Last'] = self.format_currency(last)
        response['Low'] = self.format_currency(low)
        response['High'] = self.format_currency(high)
        if gdax:
            response['GDAX'] = gdax
        prices = ", ".join([": ".join([key, str(val)])
                            for key, val in response.items()])
        return '%s, %s' % (name, prices)
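# get_currency_price isn't invoked anywhere in this excerpt. A minimal sketch
# of how a bot command would presumably call it -- the method names `btc` and
# `eth` and the has_gdax flags below are assumptions, not part of the original
# code:

def btc(self):
    # Hypothetical wrapper command: report the aggregated BTC/USD price.
    return self.get_currency_price('Bitcoin', 'btc', has_gdax=True)

def eth(self):
    # Hypothetical wrapper command: report the aggregated ETH/USD price.
    return self.get_currency_price('Ethereum', 'eth', has_gdax=True)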
def giphy(self):
    if not self.values:
        return 'Giphy what?'
    query = '+'.join(self.values)
    try:
        json = Browser('http://api.giphy.com/v1/gifs/random?tag=%s&api_key=dc6zaTOxFJmzC' % query)
        parsed = json.json()
        return parsed['data']['image_original_url']
    except:
        return 'Unable to giphy'
def startup(self):
    url = 'http://itsthisforthat.com/api.php?json'
    request = Browser(url)
    if request.error:
        # The original also called sys.exit() after this return; that call was
        # unreachable, so it has been dropped.
        return 'Total fail: %s' % request.error
    try:
        json = request.json()
        return 'It\'s a %s for %s' % (json['this'].lower().capitalize(),
                                      json['that'].lower().capitalize())
    except Exception:
        return 'It\'s a replacement for itsthisforthat.com... (Request failed)'
def isitdown(self):
    if not self.values:
        return "Is what down?"
    url = 'http://www.isitdownrightnow.com/check.php?domain=%s' % self.values[0]
    result = Browser(url)
    found = result.read().find('UP')
    if found > 0:
        status = '%s is up' % self.values[0]
    else:
        status = '%s is down' % self.values[0]
    return status
def mta(self):
    if not self.values:
        return 'Which line?'
    q = self.values[0]
    info = Browser('http://web.mta.info/status/serviceStatus.txt').soup()
    lines = info.find_all('line')
    for line in lines:
        if q.lower() in line.find('name').string.lower():
            message = '%s: %s' % (line.find('name').string,
                                  line.find('status').string)
            if line.find('status').string != 'GOOD SERVICE':
                message = '%s %s%s' % (message,
                                       'http://www.mta.info/status/subway/',
                                       line.find('name').string)
            return message
    return 'Not found'
def get_gdax_price(self, source, dest='USD', value_of=None):
    """
    Retrieve the GDAX price of a specific currency.
    """
    gdax = '(No result)'
    gdax_url = 'https://api.gdax.com/products/%s-%s/ticker' % (source.upper(), dest.upper())
    g_request = Browser(gdax_url)
    try:
        g_json = g_request.json()
        gdax = self.format_currency(float(g_json['price']))
        if value_of:
            gdax = float(g_json['price']) * float(value_of)
    except:
        pass
    return gdax
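# `format_currency` is called by both crypto helpers but isn't shown in this
# excerpt. A minimal sketch of what it presumably does, mirroring the
# locale-based formatting used by ltc() and doge() elsewhere in this file --
# an assumption, not the original implementation:

def format_currency(self, amount):
    # Format a number as a US-style currency string, e.g. 1234.5 -> '$1,234.50'.
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    return locale.currency(amount, grouping=True)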
def rhyme(self):
    if not self.values:
        return "Enter a line"
    url = "http://emalmi.kapsi.fi/battlebot/battlebot.fcgi"
    params = "l=en&q=" + '+'.join(self.values)
    request = Browser('%s?%s' % (url, params))
    try:
        json = request.json()
        rhyme = choice(json["rhymes"])
        return rhyme['line']
    except Exception as e:
        self.chat('...')
        self._act("drops mic in shame.")
        return
def anagram(self):
    if not self.values:
        return "Enter a word or phrase"
    word = ''.join(self.values)
    url = 'http://www.anagramica.com/best/%s' % word
    request = Browser(url)
    if not request:
        return 'Error'
    try:
        json = request.json()
        return json['best']
    except Exception as e:
        self.chat("Couldn't parse.", str(e))
        return
def character(self):
    link = self._call({
        'category': 'characters',
        'name': ' '.join(self.values),
    })
    data = simplejson.loads(Browser(link).read())
    return data['results'][0]['description']
def aleksey(self):
    url = 'https://spreadsheets.google.com/feeds/list/0Auy4L1ZnQpdYdERZOGV1bHZrMEFYQkhKVHc4eEE3U0E/od6/public/basic?alt=json'
    try:
        json = Browser(url).json()
    except:
        return 'Somethin dun goobied.'
    entry = choice(json['feed']['entry'])
    return entry['title']['$t']
def catfact(self):
    url = 'http://catfacts-api.appspot.com/api/facts'
    try:
        json = Browser(url).json()
    except:
        return 'No meow facts.'
    return json['facts'][0]
def fml(self):
    # return "The FML api is discontinued and I don't feel like making a site scraper. RIP, whiny teens."
    url = 'http://www.fmylife.com'
    if self.values:
        url += '/search/' + '%20'.join(self.values)
    else:
        url += '/random'
    try:
        request = Browser(url)
        soup = request.soup()
        fml = choice(soup.find('div', {'class': 'infinite-scroll'})
                     .find_all(string=re.compile('Today'))).strip()
        return fml
    except Exception as e:
        return "Nobody's life got f****d like that"
def advice(self):
    url = 'http://api.adviceslip.com/advice'
    try:
        json = Browser(url).json()
    except:
        return 'Use a rubber if you sleep with dcross\'s mother.'
    return json['slip']['advice'] + ".. except in bed."
def weather(self):
    if not self.values:
        return "Please enter a zip/location"
    if not self.secrets.weather_api:
        return "wunderground api key is not set"
    if not self.values:
        # Unreachable while the guard above requires a location; kept as the
        # original geo-IP fallback.
        params = "autoip.json?geo_ip=%s" % self.lastip
    else:
        params = "%s.json" % self.values[0]
    base = "http://api.wunderground.com/api/%s/conditions/q/" % self.secrets.weather_api
    url = base + params
    try:
        request = Browser(url)
    except:
        return "Couldn't get weather."
    if not request:
        return "Couldn't get weather."
    try:
        json = request.json()
        json = json['current_observation']
    except:
        return "Couldn't parse weather."
    location = json['display_location']['full']
    condition = json['weather']
    temp = json['temperature_string']
    humid = json['relative_humidity']
    wind = json['wind_string']
    feels = json['feelslike_string']
    hourly = 'http://www.weather.com/weather/hourbyhour/l/%s' % self.values[0]
    radar = shorten('http://www.weather.com/weather/map/interactive/l/%s' % self.values[0])
    base = "%s, %s, %s, Humidity: %s, Wind: %s, Feels like: %s, Radar: %s"
    return base % (location, condition, temp, humid, wind, feels, radar)
def ety(self):
    if not self.values:
        return "Enter a word"
    word = self.values[0]
    params = {
        'allowed_in_frame': '0',
        'searchmode': 'term',
        'search': word
    }
    request = Browser("http://www.etymonline.com/index.php", params)
    if not request:
        return 'Error'
    cont = request.soup()
    heads = cont.findAll("dt")
    defs = cont.findAll("dd")
    if not len(defs):
        return "Couldn't find anything"
    try:
        ord = int(self.values[1])
    except:
        ord = 1
    if ord > len(defs):
        ord = 1
    ord -= 1
    if ord < 0:
        ord = 0
    try:
        _word = ''.join(heads[ord].findAll(text=True))
        _def = ''.join(defs[ord].findAll(text=True))
    except Exception as e:
        self.chat('Failed to parse.', error=e)
        return
    return "Etymology %s of %s for %s: %s" % (str(ord + 1), str(len(defs)),
                                              _word, _def)
def urlparse(self, url):
    if self.cleanse(url) == False:
        return [url]
    fubs = 0
    title = "Couldn't get title"
    site = Browser(url)
    if site.error:
        self.chat('Total fail: %s' % site.error)
        return [url]
    roasted = shorten(url)
    if not roasted:
        roasted = "Couldn't roast"
        fubs += 1
    self.chat('%s @ %s' % (unescape(site.title()), roasted))
    return [url]
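# `cleanse` gates urlparse() above but isn't defined in this excerpt. A rough
# sketch of the kind of check it presumably performs (only follow fetchable
# HTTP/HTTPS links) -- hypothetical, not the original helper:

def cleanse(self, url):
    # Only allow URLs the bot should actually fetch and title.
    return url.lower().startswith(('http://', 'https://'))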
def random(self, values=False, array=False):
    default = [0, 9999, 1, 1]
    if not values:
        values = self.values
    if not values:
        return "No values. You probably meant .toss"
    if values and values[0][:1] == 'd':
        default[0] = 1
        default[1] = values[0][1:]
        send = default
    elif 'd' in values[0]:
        default[0] = 1
        num, high = values[0].split('d')
        default[1] = high
        default[3] = num
        send = default
    elif values:
        splice = len(values)
        send = self.values + default[splice:]
    else:
        send = default
    low, high, sets, nums = send
    base = 'http://qrng.anu.edu.au/form_handler.php?repeats=no&'
    params = "min_num=%s&max_num=%s&numofsets=%s&num_per_set=%s" % (low, high, sets, nums)
    url = base + params
    # Needs to be vastly improved for other sets
    site = Browser(url)
    result = site.read().split(':')[2].strip()[:-6]
    if array:
        result = result.split(', ')
    return result
def fml(self):
    url = 'http://api.fmylife.com'
    params = {'language': 'en', 'key': self.secrets.fml_api}
    if self.values:
        url += '/view/search'
        params['search'] = "+".join(self.values)
    else:
        url += '/view/random'
    try:
        request = Browser(url, params)
        soup = request.soup()
        if self.values:
            fml = choice(soup.find_all("text")).get_text()
        else:
            fml = soup.find_all("text")[0].get_text()
        return fml
    except Exception as e:
        return "Nobody's life got f****d like that"
def ud(self):
    if not self.values:
        return "Whatchu wanna know, bitch?"
    term = ' '.join(self.values)
    term = term.strip()
    if term == 'truffle butter':
        return "You all know what it is, and I don't want to have to read this shit again."
    try:
        request = Browser('http://www.urbandictionary.com/define.php',
                          params={'term': term})
        soup = request.soup()
    except:
        return "parse error"
    elem = soup.find('div', {'class': 'meaning'})
    try:
        defn = []
        for string in elem.stripped_strings:
            defn.append(string)
    except:
        return "couldn't find anything"
    if not defn:
        return "couldn't find anything"
    # Unfortunately, BeautifulSoup doesn't parse hexadecimal HTML
    # entities like &#x27; so use the parser for any stray entities.
    response = []
    for paragraph in defn:
        wrapped = textwrap.wrap(paragraph, 200)
        _response = unescape(' '.join(wrapped))
        response.append(_response)
    return ' '.join(response)
def doge(self):
    url = 'http://dogecoinaverage.com/USD.json'
    request = Browser(url)
    if not request:
        return "Couldn't retrieve DOGE data."
    try:
        json = request.json()
    except:
        return "Couldn't parse DOGE data."
    weighted = float(json['vwap'])
    if self.values:
        try:
            locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
            value = locale.currency(weighted * float(self.values[0]))
        except:
            return "Couldn't compute DOGE value."
        return 'Value of %s DOGE is %s' % (self.values[0], value)
    else:
        return 'Dogecoin, Volume-Weighted Average Price: $%s' % (weighted)