def wiki_helper(request_word):
    """Look up a word or phrase on Wikipedia and return a summary embed."""
    if len(request_word.split(" ")) > 1:
        request_word = request_word.replace(" ", "_")
    data = payload_request(
        "https://en.wikipedia.org/api/rest_v1/page/summary/{t}?redirect=true&origin=*".format(
            t=request_word
        ),
    )
    if data["title"] == "Not found.":
        return errorEmbedBuilder(data["detail"], "Wikipedia")
    general_debug("Wikipedia Entry is: " + str(data))
    message = length_limiter(data["extract"])
    em = Embed(
        title="Wikipedia Entry: " + data["title"],
        url=data["content_urls"]["desktop"]["page"],
        colour=0xFFE9AB,
        description=message,
    )
    if "thumbnail" in data:
        em.set_thumbnail(url=data["thumbnail"]["source"])
    general_info("Wikifind created and returned embed object")
    return em
def websterDict_helper(request_word, api_key, max_definitions=5):
    """Look up a word in the Merriam-Webster collegiate dictionary and return an embed."""
    if len(request_word.split(" ")) > 1:
        request_word = request_word.replace(" ", "%20")
    data = query_request(
        "www.dictionaryapi.com",
        "/api/v3/references/collegiate/json/{t}?key={k}".format(t=request_word, k=api_key),
    )
    # When the word isn't found, the API returns a plain list of suggestion strings.
    if not any(isinstance(x, dict) for x in data):
        return errorEmbedBuilder(
            "Couldn't define: *" + request_word + "*, did you mean: " + ", ".join(data),
            "Webster Dictionary",
        )
    general_debug("Webster Dictionary is: " + str(data))
    em = Embed(title="Webster Dictionary: " + data[0]["meta"]["id"], colour=0x2784EF)
    for element in range(min(max_definitions, len(data))):
        datum = data[element]
        em.add_field(
            name=datum["meta"]["id"] + " (" + datum["fl"] + ")",
            value=";\n".join(datum["shortdef"]),
            inline=False,
        )
    general_info("Webster Dictionary created and returned embed object")
    return em
def thes_helper(request_word, api_key, max_definitions=7):
    """Look up a word in the Merriam-Webster thesaurus and return an embed of synonyms."""
    if len(request_word.split(" ")) > 1:
        request_word = request_word.replace(" ", "%20")
    data = query_request(
        "www.dictionaryapi.com",
        "/api/v3/references/thesaurus/json/{t}?key={k}".format(t=request_word, k=api_key),
    )
    # When the word isn't found, the API returns a plain list of suggestion strings.
    if not any(isinstance(x, dict) for x in data):
        return errorEmbedBuilder(
            "Couldn't thesaurus: *" + request_word + "*, did you mean: " + ", ".join(data),
            "Webster Thesaurus",
        )
    general_debug("Thesaurus lookup is: " + str(data))
    syns = data[0]["meta"]["syns"][0]
    number_of_entries = min(len(syns), max_definitions)
    em = Embed(
        title="Thesaurus Entry: " + data[0]["meta"]["id"],
        colour=0xEDBE47,
        description=", ".join(syns[:number_of_entries]),
    )
    general_info("Thesaurus Lookup created and returned embed object")
    return em
def aug_finder(word):
    """Decide (once, then persistently) whether a word is "aug und tier"."""
    if word in augLibrary:
        auWord = augLibrary[word]
    else:
        # First time this word is seen: flip a coin and persist the verdict.
        auWord = random.choice([True, False])
        augLibrary[word] = auWord
        with open("dict.json", "w") as f:
            f.write(json.dumps(augLibrary))
    coinF = "is" if auWord else "is not"
    constructedString = "**{w}** __{c}__ aug und tier"
    general_debug("Aug und is: " + word + ":" + str(auWord))
    general_info("Returned Au definition")
    return constructedString.format(c=coinF, w=word)
def wolf_short_query(submit_q, w_token):
    """Ask the Wolfram Alpha short-answer API a question and return an embed."""
    fixedReq = string_formatter(submit_q)
    data = query_request(
        "api.wolframalpha.com",
        "/v1/result?appid={key}&i={r_s}".format(key=w_token, r_s=fixedReq),
        raw_return=True,
    )
    general_debug("Wolframalpha is: " + str(data))
    constructed_string = "**{question}?**\n*{answer}*".format(question=submit_q, answer=data)
    em = Embed(title="Wolfram Alpha", description=constructed_string, colour=0xF12223)
    general_info("Wolfram created and returned embed object")
    return em
def reddit_top3(req_sub):
    """Fetch the top three posts of a subreddit and return them as an embed."""
    if " " in req_sub:
        return errorEmbedBuilder("Subreddit can't have spaces", "Reddit")
    data = query_request(
        "www.reddit.com",
        "/r/{s}/top/.json?limit=3".format(s=req_sub),
    )
    if data is False:
        return errorEmbedBuilder("Something went wrong when decoding", "Reddit")
    general_debug("Reddit is: " + str(data))
    if "error" in data:
        if "reason" in data:
            return errorEmbedBuilder("Subreddit is " + data["reason"], "Reddit")
        return errorEmbedBuilder("Subreddit doesn't exist", "Reddit")
    tops = data["data"]["children"]
    if len(tops) < 1:
        return errorEmbedBuilder(
            "[/r/{0}]({1}) has no recent posts".format(
                req_sub, "https://www.reddit.com/r/" + req_sub
            ),
            "Reddit",
        )
    message = ""
    for i, top in enumerate(tops, start=1):
        message += str(i) + ". " + shortStringBuild(
            top["data"]["title"],
            top["data"]["url"],
            top["data"]["permalink"],
        )
    em = Embed(title="Top posts of /r/" + req_sub, description=message, colour=0x0000FF)
    general_info("Reddit created and returned embed object")
    return em
def urbanDict_helper(request_word, char_lim=900):
    """Look up a term on Urban Dictionary and return the top definition as an embed."""
    if len(request_word.split(" ")) > 1:
        request_word = request_word.replace(" ", "%20")
    data = query_request(
        "api.urbandictionary.com", "/v0/define?term={stk}".format(stk=request_word)
    )
    definitions = data["list"]
    if len(definitions) == 0:
        return errorEmbedBuilder(
            "Couldn't define: *" + request_word + "*", "Urban Dictionary"
        )
    general_debug("Urban Dictionary is: " + str(definitions[0]))
    temp_def = length_limiter(definitions[0]["definition"], char_lim)
    temp_example = length_limiter(definitions[0]["example"], char_lim)
    em = Embed(
        title="Urban Dictionary: " + definitions[0]["word"],
        url=definitions[0]["permalink"],
        colour=0xEF8427,
    )
    # Rewrite Urban Dictionary's [bracketed] cross-reference markup via the
    # bracketRemove/underLinesSub regex pair before displaying.
    em.add_field(
        name="Definition",
        value=re.sub(bracketRemove, underLinesSub, temp_def, 0, re.MULTILINE),
    )
    em.add_field(
        name="Example",
        value="*" + re.sub(bracketRemove, underLinesSub, temp_example, 0, re.MULTILINE) + "*",
    )
    general_info("Urban Dictionary created and returned embed object")
    return em
def weather_helper(
    user_name, request_location: str, location_token, forecast_token, lat=None, lon=None
):
    """Return current conditions and a three-day forecast for a location as an embed."""
    if lat is None and lon is None:
        # Use geocoding to get lat/lon, then cache the result for this user
        dataPayload = {"key": location_token, "q": request_location, "format": "json"}
        geo_response = payload_request(geocode_url, dataPayload)
        general_debug(
            "Location is: " + geo_response[0]["lat"] + "," + geo_response[0]["lon"]
        )
        lon = geo_response[0]["lon"]
        lat = geo_response[0]["lat"]
        city_name = geo_response[0]["display_name"]
        user_library[str(user_name)] = {"lat": lat, "lon": lon, "city_name": city_name}
        with open("user_weather.json", "w") as f:
            f.write(json.dumps(user_library))
    # Get the weather conditions for the day
    wea_response = query_request(
        "api.openweathermap.org",
        "/data/2.5/onecall?lat={latitude}&lon={longitude}&appid={apikey}&units=imperial".format(
            apikey=forecast_token, latitude=lat, longitude=lon
        ),
    )
    general_info("Weather is: " + str(wea_response))
    weather_f = wea_response["current"]["temp"]
    humidity = str(wea_response["current"]["humidity"]) + "%"
    dewpoint = wea_response["current"]["dew_point"]
    summary = wea_response["current"]["weather"][0]["description"].title()
    # Summarise the current conditions
    mainString = (
        "It is currently **{temp}°F** "
        "with **{hum}** humidity "
        "and a dewpoint of **{dew}**°F\n"
        "A description of the weather: {sum}"
    ).format(temp=weather_f, dew=dewpoint, hum=humidity, sum=summary)
    # Output all of it
    em = Embed(
        title="Weather in " + user_library[str(user_name)]["city_name"],
        description=mainString,
        colour=0x00FF00,
    )
    # Gather forecast information for today and the next two days
    days = ["Today", "Tomorrow", "Day After"]
    for i, day in enumerate(days):
        daily = wea_response["daily"][i]
        em.add_field(
            name="{d}'s forecast ({hi}/{lo}):".format(
                d=day, hi=daily["temp"]["max"], lo=daily["temp"]["min"]
            ),
            value=emojiDict[daily["weather"][0]["icon"]]
            + " "
            + daily["weather"][0]["description"].title(),
        )
    general_info("Weather created and returned embed object")
    return em
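# A minimal smoke-test sketch of the helpers above, guarded so it only runs when this
# module is executed directly. It assumes the surrounding module already provides the
# request helpers (payload_request/query_request), the logging helpers, and discord's
# Embed class, and that network access is available; "black hole" and "python" are
# arbitrary example inputs, not part of the bot's configuration.
if __name__ == "__main__":
    # Keyless lookups only; the dictionary, thesaurus, and Wolfram helpers need API keys.
    print(wiki_helper("black hole").title)
    print(reddit_top3("python").title)
    print(urbanDict_helper("yeet").title)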