def get_streamer_data(streamer):
    """Return a one-line status message for a Twitch streamer.

    Queries the stream endpoint first; a live stream is summarized from
    its payload, otherwise the channel endpoint supplies offline stats.
    Returns None when the stream API reports an error for *streamer*.
    """
    stream = json.loads(http.get_url_data(STREAM_API % streamer))
    if 'error' in stream:
        return None
    # The API returns a null 'stream' entry when the channel is offline;
    # the original fetched it twice and tracked a redundant live flag.
    data = stream.get('stream')
    if data is not None:
        chan = data['channel']
        return '%s |LIVE|: %s - %s | Viewers: %s | Followers: %s' % (
            chan['display_name'], data['game'], chan['status'],
            data['viewers'], chan['followers'])
    chan = json.loads(http.get_url_data(CHANNEL_API % streamer))
    return '%s |OFFLINE|: %s | Views: %s | Followers: %s' % (
        chan['display_name'], chan['game'], chan['views'],
        chan['followers'])
def get_streamer_data(streamer):
    """Build a |LIVE| or |OFFLINE| summary string for *streamer*.

    Returns None when the stream API answers with an error object.
    """
    payload = json.loads(http.get_url_data(STREAM_API % streamer))
    if 'error' in payload:
        return None
    live_data = payload.get('stream', None)
    if live_data is None:
        # Offline: channel-level stats come from the channel endpoint.
        info = json.loads(http.get_url_data(CHANNEL_API % streamer))
        return '%s |OFFLINE|: %s | Views: %s | Followers: %s' % (
            info['display_name'], info['game'], info['views'],
            info['followers'])
    channel = live_data['channel']
    return '%s |LIVE|: %s - %s | Viewers: %s | Followers: %s' % (
        channel['display_name'], live_data['game'], channel['status'],
        live_data['viewers'], channel['followers'])
def get_new():
    """Scrape a page of random FMLs and append (id, text) pairs to `cached`."""
    page = http.get_url_data('http://www.fmylife.com/random')
    soup = bs4.BeautifulSoup(page)
    for post in soup.find_all('div', {'class': 'post article'}):
        body = post.find('p')
        story = ''.join(body.find_all(text=True))
        cached.append((int(post['id']), story))
def get_video_info(video_id):
    """Return a summary line (title, uploader, view/rating counts) for a video."""
    response = json.loads(get_url_data(api_video % video_id))
    entry = response['entry']
    rating = entry['yt$rating']
    return u"%s - by: %s - Views: %s Likes: %s Dislikes: %s" % (
        entry['title']['$t'],
        entry['author'][0]['name']['$t'],
        entry['yt$statistics']['viewCount'],
        rating['numLikes'],
        rating['numDislikes'])
def get_video_info(video_id):
    """Fetch metadata for *video_id* and format it into a single message."""
    url = api_video % video_id
    entry = json.loads(get_url_data(url))['entry']
    title = entry['title']['$t']
    uploader = entry['author'][0]['name']['$t']
    view_count = entry['yt$statistics']['viewCount']
    likes = entry['yt$rating']['numLikes']
    dislikes = entry['yt$rating']['numDislikes']
    return u"%s - by: %s - Views: %s Likes: %s Dislikes: %s" % (
        title, uploader, view_count, likes, dislikes)
def weather_command(chat, message, args, sender):
    """Chat command: report the forecast for the location named in *args*.

    Fix: the original queried only args[0], silently truncating
    multi-word locations ("new york" became "new"); all arguments are
    now joined and URL-quoted, matching the sibling itunes_command.
    """
    if not args:
        chat.SendMessage("You must specify a location.")
        return
    location_query = ' '.join(args)
    url = auto_complete % urllib.quote(location_query)
    json_data = json.loads(http.get_url_data(url))
    results = json_data['RESULTS']
    if not results:
        chat.SendMessage("No match found for %s." % location_query)
        return
    # Take the top autocomplete hit; zmw is Wunderground's location key.
    location = results[0]['name']
    zmw = results[0]['zmw']
    response = get_forecast(zmw, location)
    if response is None:
        return
    chat.SendMessage(response)
def itunes_command(chat, message, args, sender):
    """Chat command: search the iTunes API and report the top track match.

    Fix: the original indexed results[0] before checking for an empty
    result list, raising IndexError on any query with zero matches; the
    emptiness guard now runs before indexing.
    """
    if not args:
        chat.SendMessage("Provide a query.")
        return
    query = urllib.quote(' '.join(args))
    formatted_url = api_url.format(query)
    json_data = json.loads(get_url_data(formatted_url))
    results = json_data.get('results')
    # Covers a missing 'results' key, an empty list, and a null entry.
    if not results or results[0] is None:
        chat.SendMessage("Unable to find a match.")
        return
    result = results[0]
    msg = "Track: %s | Artist: %s | Album: %s | %s" % (
        result['trackName'], result['artistName'],
        result['collectionName'], result['trackViewUrl'])
    chat.SendMessage(msg)
def get_forecast(zmw, loc):
    """Fetch today's Wunderground forecast for *zmw* and format it.

    Returns None when no 'wunderground' API key is configured.
    """
    api_key = config.config().get("keys", {}).get("wunderground", None)
    if api_key is None:
        return None
    raw = http.get_url_data(forecast_api % (api_key, zmw))
    # First entry of 'forecastday' is today's simple forecast.
    today = json.loads(raw)['forecast']['simpleforecast']['forecastday'][0]
    high = today['high']
    low = today['low']
    return ("%s - Sky Conditions: %s | Temps - High: %sF/%sC | "
            "Low: %sF/%sC | Humidity: %s" % (
                loc, today['conditions'],
                high['fahrenheit'], high['celsius'],
                low['fahrenheit'], low['celsius'],
                today['avehumidity']))
def get_new():
    """Populate the `cached` list with (id, text) pairs from random FMLs."""
    html = http.get_url_data('http://www.fmylife.com/random')
    articles = bs4.BeautifulSoup(html).find_all('div',
                                                {'class': 'post article'})
    for article in articles:
        paragraph = article.find('p')
        text = ''.join(paragraph.find_all(text=True))
        cached.append((int(article['id']), text))