def info_user(username):
    """Fetch a GitHub user's profile plus their recently updated repos."""
    login = quote(username)
    user = web.getJSON("https://api.github.com/users/%s" % login)
    # Attach the repository list directly onto the profile dict.
    user["repos"] = web.getJSON(
        "https://api.github.com/users/%s/repos?sort=updated" % login)
    return user
def info_commit(repo, commit=None):
    """Return commits of a repository; a one-element list if `commit` is given."""
    rp = info_repos(repo)
    # Resolve a loose repository name to its canonical "owner/name" form.
    fullname = rp["items"][0]["full_name"] if rp["items"] else repo
    if commit is None:
        return web.getJSON("https://api.github.com/repos/%s/commits"
                           % quote(fullname))
    return [web.getJSON("https://api.github.com/repos/%s/commits/%s"
                        % (quote(fullname), quote(commit)))]
def info_issue(repo, issue=None):
    """Return issues of a repository; a one-element list if `issue` is given."""
    rp = info_repos(repo)
    # Resolve a loose repository name to its canonical "owner/name" form.
    fullname = rp["items"][0]["full_name"] if rp["items"] else repo
    if issue is None:
        return web.getJSON("https://api.github.com/repos/%s/issues?sort=updated"
                           % quote(fullname))
    return [web.getJSON("https://api.github.com/repos/%s/issues/%s"
                        % (quote(fullname), quote(issue)))]
def available(dom):
    """Tell whether the domain `dom` is reported AVAILABLE by the API."""
    answer = getJSON(URL_AVAIL % urllib.parse.quote(dom))
    error = answer.get("ErrorMessage")
    if error is not None:
        raise IMException(error["msg"])
    return answer["DomainInfo"]["domainAvailability"] == "AVAILABLE"
def search_hosts(query):
    """Search the host index; the response body is capped at 4 MiB."""
    params = {'query': query, 'key': context.config["apikey"]}
    return web.getJSON(
        BASEURL + "host/search?" + urllib.parse.urlencode(params),
        max_size=4194304)
def get_dhl_info(dhl_id, lang="en"):
    """Return the first DHL tracking result for `dhl_id`, or None.

    NOTE(review): the `lang` parameter is currently not used in the request.
    """
    url = "http://www.dhl.com/shipmentTracking?" + urllib.parse.urlencode(
        {'AWB': dhl_id})
    answer = getJSON(url)
    results = answer.get("results")
    if results:
        return results[0]
def get_movie(title=None, year=None, imdbid=None, fullplot=True, tomatoes=False):
    """Returns the information about the matching movie"""
    # Build the query string, keeping only the criteria that were provided.
    url = "http://www.omdbapi.com/?"
    for param, value in (("t", title), ("y", year), ("i", imdbid)):
        if value is not None:
            url += "%s=%s&" % (param, urllib.parse.quote(value))
    if fullplot:
        url += "plot=full&"
    if tomatoes:
        url += "tomatoes=true&"
    # Make the request
    data = web.getJSON(url)
    # Return data
    if "Error" in data:
        raise IMException(data["Error"])
    if data.get("Response") == "True":
        return data
    raise IMException("An error occurs during movie search")
def geocode(query, limit=7):
    """Yield (coordinates, properties) for each geocoding match of `query`."""
    params = urllib.parse.urlencode({'query': query, 'limit': limit})
    answer = web.getJSON(URL_GEOCODE_API + params)
    for feature in answer["features"]:
        yield feature["geometry"]["coordinates"], feature["properties"]
def get_json_weather(coords, lang="en", units="ca"):
    """Fetch weather data for `coords` (a lat/lon pair).

    Raises IMException when the upstream service flags itself unavailable.
    """
    wth = web.getJSON(URL_DSAPI % (float(coords[0]), float(coords[1]),
                                   lang, units))
    # First read flags: the API reports temporary outages explicitly.
    if wth is None or "darksky-unavailable" in wth["flags"]:
        # Fixed typo in the user-facing message: "maintenace" -> "maintenance".
        raise IMException("The given location is supported but a temporary "
                          "error (such as a radar station being down for "
                          "maintenance) made data unavailable.")
    return wth
def get_unwikitextified(site, wikitext, ssl=False, path="/w/api.php"):
    """Expand the templates contained in `wikitext` through the wiki's API."""
    scheme = "https" if ssl else "http"
    url = "%s://%s%s?format=json&action=expandtemplates&text=%s" % (
        scheme, site, path, urllib.parse.quote(wikitext))
    answer = web.getJSON(url)
    return answer["expandtemplates"]["*"]
def get_unwikitextified(site, wikitext, ssl=False):
    """Expand the templates of `wikitext` via the site's MediaWiki API."""
    scheme = "https" if ssl else "http"
    url = "%s://%s/w/api.php?format=json&action=expandtemplates&text=%s" % (
        scheme, site, urllib.parse.quote(wikitext))
    answer = web.getJSON(url)
    return answer["expandtemplates"]["*"]
def do_search(terms):
    """Query DuckDuckGo; the `!safeoff` pseudo-term disables safe search."""
    safeoff = "!safeoff" in terms
    if safeoff:
        terms.remove("!safeoff")
    sterm = " ".join(terms)
    url = ("https://api.duckduckgo.com/?q=%s&format=json&no_redirect=1%s"
           % (quote(sterm), "&kp=-1" if safeoff else ""))
    return DDGResult(sterm, web.getJSON(url))
def get_json_weather(coords, lang="en", units="ca"):
    """Fetch weather data for the (lat, lon) pair `coords`.

    Raises IMException when the service reports itself unavailable.
    """
    wth = web.getJSON(URL_DSAPI % (float(coords[0]), float(coords[1]),
                                   lang, units))
    # First read flags: a temporary outage is signalled via the flags list.
    if wth is None or "darksky-unavailable" in wth["flags"]:
        # Fixed typo in the user-facing message: "maintenace" -> "maintenance".
        raise IMException(
            "The given location is supported but a temporary error (such as "
            "a radar station being down for maintenance) made data "
            "unavailable.")
    return wth
def opensearch(site, term, ssl=False, path="/w/api.php"):
    """Yield (title, description, url) triples from MediaWiki opensearch."""
    url = "http%s://%s%s?format=json&action=opensearch&search=%s" % (
        "s" if ssl else "", site, path, urllib.parse.quote(term))
    response = web.getJSON(url)
    if response is None or len(response) < 4:
        return
    # The answer is three parallel arrays: names, descriptions, links.
    for idx, name in enumerate(response[1]):
        yield name, response[2][idx], response[3][idx]
def cmd_snap(msg):
    """List the snaps published on ohsnap.p0m.fr."""
    images = getJSON("https://ohsnap.p0m.fr/api/images")
    if "errmsg" in images:
        raise IMException(images["errmsg"])
    res = Response(channel=msg.channel, nomore="No more snap to show")
    for img in images:
        res.append_message(
            "Snap de {author}, le {upload_time} : https://ohsnap.p0m.fr/{hash}"
            .format(**img))
    return res
def search(site, term, ssl=False):
    """Yield (title, snippet) pairs from a MediaWiki full-text search."""
    url = ("http%s://%s/w/api.php?format=json&action=query&list=search"
           "&srsearch=%s&srprop=titlesnippet|snippet" % (
               "s" if ssl else "", site, urllib.parse.quote(term)))
    data = web.getJSON(url)
    if data is not None and "query" in data and "search" in data["query"]:
        def _unmark(txt):
            # Convert the API highlight spans into IRC formatting markers.
            return web.striphtml(
                txt.replace("<span class='searchmatch'>", "\x03\x02")
                .replace("</span>", "\x03\x02"))
        for itm in data["query"]["search"]:
            yield _unmark(itm["titlesnippet"]), _unmark(itm["snippet"])
def get_namespaces(site, ssl=False):
    """Map namespace display names to their descriptors for a MediaWiki site."""
    url = ("http%s://%s/w/api.php?format=json&action=query&meta=siteinfo"
           "&siprop=namespaces" % ("s" if ssl else "", site))
    data = web.getJSON(url)
    spaces = data["query"]["namespaces"]
    # Each descriptor's "*" entry holds the namespace's display name.
    return {spaces[ns]["*"]: spaces[ns] for ns in spaces}
def cmd_whois(msg):
    """Handle the whois command: look up whois data for a domain or an IP."""
    if not len(msg.args):
        raise IMException("Indiquer un domaine ou une IP à whois !")
    dom = msg.args[0]
    js = getJSON(URL_WHOIS % urllib.parse.quote(dom))
    if "ErrorMessage" in js:
        raise IMException(js["ErrorMessage"]["msg"])
    whois = js["WhoisRecord"]
    res = []
    # Each optional record field becomes one response item;
    # \x03\x02 are IRC formatting control characters.
    if "registrarName" in whois:
        res.append("\x03\x02registered by\x03\x02 " + whois["registrarName"])
    if "domainAvailability" in whois:
        res.append(whois["domainAvailability"])
    if "contactEmail" in whois:
        res.append("\x03\x02contact email\x03\x02 " + whois["contactEmail"])
    if "audit" in whois:
        # Audit dates are wrapped in a {"$": value} envelope by the API.
        if "createdDate" in whois["audit"] and "$" in whois["audit"]["createdDate"]:
            res.append("\x03\x02created on\x03\x02 " +
                       whois["audit"]["createdDate"]["$"])
        if "updatedDate" in whois["audit"] and "$" in whois["audit"]["updatedDate"]:
            res.append("\x03\x02updated on\x03\x02 " +
                       whois["audit"]["updatedDate"]["$"])
    if "registryData" in whois:
        if "expiresDateNormalized" in whois["registryData"]:
            res.append("\x03\x02expire on\x03\x02 " +
                       whois["registryData"]["expiresDateNormalized"])
        if "registrant" in whois["registryData"]:
            res.append("\x03\x02registrant:\x03\x02 " +
                       whois_entityformat(whois["registryData"]["registrant"]))
        if "zoneContact" in whois["registryData"]:
            res.append("\x03\x02zone contact:\x03\x02 " +
                       whois_entityformat(whois["registryData"]["zoneContact"]))
        if "technicalContact" in whois["registryData"]:
            res.append(
                "\x03\x02technical contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["technicalContact"])
            )
        if "administrativeContact" in whois["registryData"]:
            res.append(
                "\x03\x02administrative contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["administrativeContact"])
            )
        if "billingContact" in whois["registryData"]:
            res.append(
                "\x03\x02billing contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["billingContact"])
            )
    return Response(res, title=whois["domainName"], channel=msg.channel,
                    nomore="No more whois information")
def opensearch(site, term, ssl=False):
    """Yield opensearch suggestions (title, description, url) for `term`."""
    scheme = "https" if ssl else "http"
    url = "%s://%s/w/api.php?format=json&action=opensearch&search=%s" % (
        scheme, site, urllib.parse.quote(term))
    resp = web.getJSON(url)
    if resp is None or len(resp) < 4:
        return
    # Three parallel arrays carry the suggestion names, texts and links.
    for pos, title in enumerate(resp[1]):
        yield title, resp[2][pos], resp[3][pos]
def do_search(terms):
    """Run a DuckDuckGo instant-answer query built from `terms`.

    The pseudo-term "!safeoff" is consumed and turns safe search off.
    """
    if "!safeoff" in terms:
        terms.remove("!safeoff")
        extra = "&kp=-1"
    else:
        extra = ""
    sterm = " ".join(terms)
    answer = web.getJSON(
        "https://api.duckduckgo.com/?q=%s&format=json&no_redirect=1%s"
        % (quote(sterm), extra))
    return DDGResult(sterm, answer)
def get_namespaces(site, ssl=False, path="/w/api.php"):
    """Return a name -> descriptor mapping of a MediaWiki site's namespaces."""
    url = ("http%s://%s%s?format=json&action=query&meta=siteinfo"
           "&siprop=namespaces" % ("s" if ssl else "", site, path))
    data = web.getJSON(url)
    # Key each descriptor by its display name (the "*" entry).
    return {entry["*"]: entry
            for entry in data["query"]["namespaces"].values()}
def get_english_synos(key, word):
    """Return (best, synonyms, antonyms) lists for an English word."""
    cnt = web.getJSON("https://words.bighugelabs.com/api/2/%s/%s/json"
                      % (quote(key), quote(word.encode("ISO-8859-1"))))
    best, synos, anton = [], [], []
    if cnt is not None:
        # Each part-of-speech section may carry syn/rel/ant word lists.
        for section in cnt.values():
            best += section.get("syn", [])
            synos += section.get("rel", [])
            anton += section.get("ant", [])
    return (best, synos, anton)
def get_raw_page(site, term, ssl=False, path="/w/api.php"):
    """Return the raw wikitext of the latest revision of page `term`.

    Raises IMException when the article (or its revisions) is missing.
    """
    # Built URL
    url = ("http%s://%s%s?format=json&redirects&action=query&prop=revisions"
           "&rvprop=content&titles=%s" % (
               "s" if ssl else "", site, path, urllib.parse.quote(term)))
    # Make the request
    data = web.getJSON(url)
    for k in data["query"]["pages"]:
        try:
            return data["query"]["pages"][k]["revisions"][0]["*"]
        except (KeyError, IndexError, TypeError):
            # Was a bare `except:` that hid unrelated errors; only a
            # missing/empty revisions structure means "article not found".
            raise IMException("article not found")
def get_raw_page(site, term, ssl=False):
    """Return the raw wikitext of the latest revision of page `term`.

    Raises IMException when the article (or its revisions) is missing.
    """
    # Built URL
    url = ("http%s://%s/w/api.php?format=json&redirects&action=query"
           "&prop=revisions&rvprop=content&titles=%s" % (
               "s" if ssl else "", site, urllib.parse.quote(term)))
    # Make the request
    data = web.getJSON(url)
    for k in data["query"]["pages"]:
        try:
            return data["query"]["pages"][k]["revisions"][0]["*"]
        except (KeyError, IndexError, TypeError):
            # Was a bare `except:` that hid unrelated errors; only a
            # missing/empty revisions structure means "article not found".
            raise IMException("article not found")
def get_english_synos(key, word):
    """Return (best, synonyms, antonyms) lists for an English word.

    Queries the Big Huge Thesaurus API; the word is sent ISO-8859-1 encoded.
    """
    # Use HTTPS, consistent with the sibling implementation of this lookup.
    cnt = web.getJSON("https://words.bighugelabs.com/api/2/%s/%s/json" %
                      (quote(key), quote(word.encode("ISO-8859-1"))))
    best = list()
    synos = list()
    anton = list()
    if cnt is not None:
        # Each part-of-speech section may carry syn/rel/ant word lists.
        for k, c in cnt.items():
            if "syn" in c:
                best += c["syn"]
            if "rel" in c:
                synos += c["rel"]
            if "ant" in c:
                anton += c["ant"]
    return (best, synos, anton)
def cmd_subreddit(msg):
    """Handle the subreddit command: describe the given subreddit(s)."""
    global LAST_SUBS
    if not len(msg.args):
        # Without argument, fall back to the last subreddit seen on channel.
        if msg.channel in LAST_SUBS and len(LAST_SUBS[msg.channel]) > 0:
            subs = [LAST_SUBS[msg.channel].pop()]
        else:
            raise IMException("Which subreddit? Need inspiration? "
                              "type !horny or !bored")
    else:
        subs = msg.args
    all_res = list()
    for osub in subs:
        # Accepted forms: "name", "/name", "x/name", with optional trailing
        # slash; group(1) is a single leading word character (e.g. "r").
        sub = re.match(r"^/?(?:(\w)/)?(\w+)/?$", osub)
        if sub is not None:
            if sub.group(1) is not None and sub.group(1) != "":
                where = sub.group(1)
            else:
                where = "r"
            sbr = web.getJSON("https://www.reddit.com/%s/%s/about.json" %
                              (where, sub.group(2)))
            if sbr is None:
                raise IMException("subreddit not found")
            if "title" in sbr["data"]:
                res = Response(channel=msg.channel,
                               nomore="No more information")
                # Prefer the public description; fall back to the full one.
                res.append_message(
                    ("[NSFW] " if sbr["data"]["over18"] else "") +
                    sbr["data"]["url"] + " " + sbr["data"]["title"] + ": " +
                    sbr["data"]
                    ["public_description"
                     if sbr["data"]["public_description"] != ""
                     else "description"].replace("\n", " ") +
                    " %s subscriber(s)" % sbr["data"]["subscribers"])
                if sbr["data"]["public_description"] != "":
                    res.append_message(sbr["data"]["description"].replace(
                        "\n", " "))
                all_res.append(res)
            else:
                all_res.append(
                    Response("/%s/%s doesn't exist" % (where, sub.group(2)),
                             channel=msg.channel))
        else:
            all_res.append(
                Response("%s is not a valid subreddit" % osub,
                         channel=msg.channel, nick=msg.frm))
    return all_res
def isup(url):
    """Determine if the given URL is up or not

    Argument:
    url -- the URL to check
    """
    o = urllib.parse.urlparse(getNormalizedURL(url), "http")
    if o.netloc != "":
        # Query isitup.org over HTTPS, consistent with the sibling
        # implementation; status_code == 1 means the site answered.
        isup = getJSON("https://isitup.org/%s.json" % o.netloc)
        if isup is not None and "status_code" in isup and isup["status_code"] == 1:
            return isup["response_time"]
    return None
def _ticker(to_server, to_channel, **kwargs):
    """Poll the ohsnap API and announce any new snap on the channel."""
    global last_seen
    # Re-arm the event so this ticker keeps firing every 42 seconds.
    context.add_event(
        ModuleEvent(call=partial(_ticker, to_server, to_channel, **kwargs),
                    interval=42))
    last = getJSON("https://ohsnap.p0m.fr/api/images/last")
    if last["hash"] != last_seen:
        # Stay silent on the very first poll (last_seen still None);
        # just record the current hash.
        if last_seen is not None:
            context.send_response(
                to_server,
                Response(
                    "Nouveau snap de {author} : https://ohsnap.p0m.fr/{hash}".
                    format(**last),
                    channel=to_channel))
        last_seen = last["hash"]
def cmd_tpb(msg):
    """Handle the torrent-search command and format the results."""
    if not len(msg.args):
        raise IMException("indicate an item to search!")
    torrents = getJSON(URL_TPBAPI + urllib.parse.quote(" ".join(msg.args)))
    res = Response(channel=msg.channel, nomore="No more torrents",
                   count=" (%d more torrents)")
    if torrents:
        for torrent in torrents:
            # Precompute human-readable size and date for the format string.
            torrent["sizeH"] = human.size(torrent["size"])
            torrent["dateH"] = datetime.fromtimestamp(
                torrent["date"]).strftime('%Y-%m-%d %H:%M:%S')
            res.append_message(
                "\x03\x02{title}\x03\x02 in {category}, {sizeH}; added at {dateH}; id: {id}; magnet:?xt=urn:btih:{magnet}&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=udp%3A%2F%2Ftracker.publicbt.com%3A80&tr=udp%3A%2F%2Ftracker.istole.it%3A6969&tr=udp%3A%2F%2Fopen.demonii.com%3A1337"
                .format(**torrent))
    return res
def get_ups_info(track_id):
    """Query UPS tracking; return (number, status, datetime, location, scan)."""
    payload = json.dumps({'Locale': "en_US", 'TrackingNumber': [track_id]})
    track_data = getJSON(
        "https://www.ups.com/track/api/Track/GetStatus?loc=en_US",
        payload.encode('utf-8'),
        header={"Content-Type": "application/json"})
    # Only the first tracked package and its most recent activity are used.
    detail = track_data["trackDetails"][0]
    activity = detail["shipmentProgressActivities"][0]
    return (detail["trackingNumber"],
            detail["packageStatus"],
            activity["date"] + " " + activity["time"],
            activity["location"],
            activity["activityScan"])
def isup(url):
    """Determine if the given URL is up or not

    Argument:
    url -- the URL to check
    """
    parsed = urllib.parse.urlparse(getNormalizedURL(url), "http")
    if parsed.netloc == "":
        return None
    answer = getJSON("https://isitup.org/%s.json" % parsed.netloc)
    # status_code == 1 means the site answered.
    if answer is not None and answer.get("status_code") == 1:
        return answer["response_time"]
    return None
def search(site, term, ssl=False, path="/w/api.php"):
    """Yield (title, snippet) results from a MediaWiki full-text search."""
    url = ("http%s://%s%s?format=json&action=query&list=search&srsearch=%s"
           "&srprop=titlesnippet|snippet" % (
               "s" if ssl else "", site, path, urllib.parse.quote(term)))
    data = web.getJSON(url)
    if data is not None and "query" in data and "search" in data["query"]:
        def _fmt(txt):
            # Replace the highlight spans with IRC formatting markers.
            return web.striphtml(
                txt.replace("<span class='searchmatch'>", "\x03\x02")
                .replace("</span>", "\x03\x02"))
        for result in data["query"]["search"]:
            yield _fmt(result["titlesnippet"]), _fmt(result["snippet"])
def cmd_subreddit(msg):
    """Handle the subreddit command: describe the given subreddit(s).

    Without argument, reuse the last subreddit seen on this channel.
    """
    global LAST_SUBS
    if not len(msg.args):
        if msg.channel in LAST_SUBS and len(LAST_SUBS[msg.channel]) > 0:
            subs = [LAST_SUBS[msg.channel].pop()]
        else:
            raise IMException("Which subreddit? Need inspiration? "
                              "type !horny or !bored")
    else:
        subs = msg.args
    all_res = list()
    for osub in subs:
        sub = re.match(r"^/?(?:(\w)/)?(\w+)/?$", osub)
        if sub is not None:
            if sub.group(1) is not None and sub.group(1) != "":
                where = sub.group(1)
            else:
                where = "r"
            # Use HTTPS, consistent with the sibling implementation.
            sbr = web.getJSON("https://www.reddit.com/%s/%s/about.json" %
                              (where, sub.group(2)))
            if sbr is None:
                raise IMException("subreddit not found")
            if "title" in sbr["data"]:
                res = Response(channel=msg.channel,
                               nomore="No more information")
                # Prefer the public description; fall back to the full one.
                res.append_message(
                    ("[NSFW] " if sbr["data"]["over18"] else "") +
                    sbr["data"]["url"] + " " + sbr["data"]["title"] + ": " +
                    sbr["data"]["public_description"
                                if sbr["data"]["public_description"] != ""
                                else "description"].replace("\n", " ") +
                    " %s subscriber(s)" % sbr["data"]["subscribers"])
                if sbr["data"]["public_description"] != "":
                    res.append_message(
                        sbr["data"]["description"].replace("\n", " "))
                all_res.append(res)
            else:
                all_res.append(Response("/%s/%s doesn't exist" %
                                        (where, sub.group(2)),
                                        channel=msg.channel))
        else:
            all_res.append(Response("%s is not a valid subreddit" % osub,
                                    channel=msg.channel, nick=msg.nick))
    return all_res
def find_movies(title, year=None):
    """Find existing movies matching a approximate title"""
    title = title.lower()
    # IMDb suggestion endpoint: first letter, then the underscored title.
    url = "https://v2.sg.media-imdb.com/suggests/%s/%s.json" % (
        urllib.parse.quote(title[0]),
        urllib.parse.quote(title.replace(" ", "_")))
    data = web.getJSON(url, remove_callback=True)
    if "d" not in data:
        return None
    matches = data["d"]
    if year is None:
        return matches
    return [m for m in matches if "y" in m and str(m["y"]) == year]
def find_movies(title):
    """Find existing movies matching a approximate title"""
    # Query the OMDb search endpoint.
    data = web.getJSON("http://www.omdbapi.com/?s=%s" %
                       urllib.parse.quote(title))
    if "Error" in data:
        raise IMException(data["Error"])
    if "Search" in data:
        return data
    raise IMException("An error occurs during movie search")
def find_movies(title, year=None):
    """Find existing movies matching a approximate title"""
    title = title.lower()
    first, slug = title[0], title.replace(" ", "_")
    # The suggestion endpoint is keyed by the title's first character.
    url = "https://v2.sg.media-imdb.com/suggests/%s/%s.json" % (
        urllib.parse.quote(first), urllib.parse.quote(slug))
    data = web.getJSON(url, remove_callback=True)
    if "d" not in data:
        return None
    if year is None:
        return data["d"]
    return [entry for entry in data["d"]
            if "y" in entry and str(entry["y"]) == year]
def get_fedex_info(fedex_id, lang="en_US"):
    """Query FedEx tracking; return the first package entry, or None.

    The endpoint expects a form-encoded 'data' field carrying a JSON
    TrackPackagesRequest document.
    """
    data = urllib.parse.urlencode({
        'data': json.dumps({
            "TrackPackagesRequest": {
                "appType": "WTRK",
                "appDeviceType": "DESKTOP",
                "uniqueKey": "",
                "processingParameters": {},
                "trackingInfoList": [{
                    "trackNumberInfo": {
                        "trackingNumber": str(fedex_id),
                        "trackingQualifier": "",
                        "trackingCarrier": ""
                    }
                }]
            }
        }),
        'action': "trackpackages",
        'locale': lang,
        'version': 1,
        'format': "json"
    })
    fedex_baseurl = "https://www.fedex.com/trackingCal/track"
    fedex_data = getJSON(fedex_baseurl, data.encode('utf-8'))
    # Only return a package when the response has a non-empty package list
    # and neither the global nor the per-package error list reports an error.
    if ("TrackPackagesResponse" in fedex_data
            and "packageList" in fedex_data["TrackPackagesResponse"]
            and len(fedex_data["TrackPackagesResponse"]["packageList"])
            and (not fedex_data["TrackPackagesResponse"]["errorList"][0]["code"]
                 or fedex_data["TrackPackagesResponse"]["errorList"][0]["code"] == '0')
            and not fedex_data["TrackPackagesResponse"]["packageList"][0]
            ["errorList"][0]["code"]):
        return fedex_data["TrackPackagesResponse"]["packageList"][0]
def translate(term, langFrom="en", langTo="fr"):
    """Yield translation lines for `term` from the WordReference API."""
    wres = web.getJSON(URL % (langFrom, langTo, quote(term)))
    if "Error" in wres:
        raise IMException(wres["Note"])
    for key in sorted(wres.keys()):
        section = wres[key]
        # Only the "term..." sections carry translations.
        if len(key) > 4 and key[:4] == "term":
            if "Entries" in section:
                entries = section["Entries"]
            else:
                entries = section["PrincipalTranslations"]
            for idx in sorted(entries.keys()):
                entry = entries[idx]
                yield "Translation of %s%s: %s" % (
                    entry["OriginalTerm"]["term"],
                    meaning(entry["OriginalTerm"]),
                    extract_traslation(entry))
def cmd_tpb(msg):
    """Search torrents for the given terms and format the matches."""
    if not len(msg.args):
        raise IMException("indicate an item to search!")
    found = getJSON(URL_TPBAPI + urllib.parse.quote(" ".join(msg.args)))
    res = Response(channel=msg.channel,
                   nomore="No more torrents",
                   count=" (%d more torrents)")
    if found:
        for item in found:
            # Add human-readable fields consumed by the format string below.
            item["sizeH"] = human.size(item["size"])
            stamp = datetime.fromtimestamp(item["date"])
            item["dateH"] = stamp.strftime('%Y-%m-%d %H:%M:%S')
            res.append_message(
                "\x03\x02{title}\x03\x02 in {category}, {sizeH}; added at {dateH}; id: {id}; magnet:?xt=urn:btih:{magnet}&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=udp%3A%2F%2Ftracker.publicbt.com%3A80&tr=udp%3A%2F%2Ftracker.istole.it%3A6969&tr=udp%3A%2F%2Fopen.demonii.com%3A1337"
                .format(**item))
    return res
def get_laposte_info(laposte_id):
    """Fetch La Poste tracking: (product, id, last event label, date).

    A first request to the public tracking page yields an access_token
    cookie which is replayed to authenticate the JSON API call.
    """
    status, laposte_headers = getURLHeaders(
        "https://www.laposte.fr/outils/suivre-vos-envois?" +
        urllib.parse.urlencode({'code': laposte_id}))
    laposte_cookie = None
    for k, v in laposte_headers:
        if k.lower() == "set-cookie" and v.find("access_token") >= 0:
            laposte_cookie = v.split(";")[0]
    # NOTE(review): if no access_token cookie was found, laposte_cookie is
    # still None here and is sent as-is — confirm getJSON tolerates that.
    laposte_data = getJSON(
        "https://api.laposte.fr/ssu/v1/suivi-unifie/idship/%s?lang=fr_FR" %
        urllib.parse.quote(laposte_id),
        header={
            "Accept": "application/json",
            "Cookie": laposte_cookie
        })
    shipment = laposte_data["shipment"]
    return (shipment["product"], shipment["idShip"],
            shipment["event"][0]["label"], shipment["event"][0]["date"])
def get_fedex_info(fedex_id, lang="en_US"):
    """Query FedEx tracking for `fedex_id`; return the first package, or None.

    The request is a form-encoded 'data' field wrapping a JSON
    TrackPackagesRequest document, POSTed to the tracking endpoint.
    """
    data = urllib.parse.urlencode({
        'data': json.dumps({
            "TrackPackagesRequest": {
                "appType": "WTRK",
                "appDeviceType": "DESKTOP",
                "uniqueKey": "",
                "processingParameters": {},
                "trackingInfoList": [
                    {
                        "trackNumberInfo": {
                            "trackingNumber": str(fedex_id),
                            "trackingQualifier": "",
                            "trackingCarrier": ""
                        }
                    }
                ]
            }
        }),
        'action': "trackpackages",
        'locale': lang,
        'version': 1,
        'format': "json"
    })
    fedex_baseurl = "https://www.fedex.com/trackingCal/track"
    fedex_data = getJSON(fedex_baseurl, data.encode('utf-8'))
    # Guard against both the global error list and the first package's own
    # error list before trusting the package entry.
    if ("TrackPackagesResponse" in fedex_data
            and "packageList" in fedex_data["TrackPackagesResponse"]
            and len(fedex_data["TrackPackagesResponse"]["packageList"])
            and (not fedex_data["TrackPackagesResponse"]["errorList"][0]["code"]
                 or fedex_data["TrackPackagesResponse"]["errorList"][0]["code"] == '0')
            and not fedex_data["TrackPackagesResponse"]["packageList"][0]
            ["errorList"][0]["code"]):
        return fedex_data["TrackPackagesResponse"]["packageList"][0]
def directions(coordinates, **kwargs):
    """Query the directions API for a route through `coordinates`.

    `coordinates` is an iterable of coordinate strings, joined with '|';
    extra query parameters are passed through as keyword arguments.
    """
    kwargs['coordinates'] = '|'.join(coordinates)
    # Removed a leftover debug print() of the full request URL.
    return web.getJSON(URL_DIRECTIONS_API + urllib.parse.urlencode(kwargs),
                       decode_error=True)
def geocode(location):
    """Yield location dicts matching the given address string."""
    obj = web.getJSON(URL_API % quote(location))
    if "results" in obj and "locations" in obj["results"][0]:
        # Only the first result set carries the candidate locations.
        yield from obj["results"][0]["locations"]
def cmd_smmry(msg):
    """Handle the smmry command: summarize a URL or a sub-command's output.

    Keyword arguments tune the SMMRY API call (length, break, keywords...).
    """
    if not len(msg.args):
        global LAST_URLS
        # Without argument, summarize the last URL seen on this channel.
        if msg.channel in LAST_URLS and len(LAST_URLS[msg.channel]) > 0:
            msg.args.append(LAST_URLS[msg.channel].pop())
        else:
            raise IMException("I have no more URL to sum up.")
    URL = URL_API
    if "length" in msg.kwargs:
        if int(msg.kwargs["length"]) > 0:
            URL += "&SM_LENGTH=" + msg.kwargs["length"]
        else:
            msg.kwargs["ignore_length"] = True
    if "break" in msg.kwargs:
        URL += "&SM_WITH_BREAK"
    if "ignore_length" in msg.kwargs:
        URL += "&SM_IGNORE_LENGTH"
    if "quote_avoid" in msg.kwargs:
        URL += "&SM_QUOTE_AVOID"
    if "question_avoid" in msg.kwargs:
        URL += "&SM_QUESTION_AVOID"
    if "exclamation_avoid" in msg.kwargs:
        URL += "&SM_EXCLAMATION_AVOID"
    if "keywords" in msg.kwargs and msg.kwargs["keywords"] is not None and int(msg.kwargs["keywords"]) > 0:
        URL += "&SM_KEYWORD_COUNT=" + msg.kwargs["keywords"]
    res = Response(channel=msg.channel)
    if web.isURL(" ".join(msg.args)):
        smmry = web.getJSON(URL + "&SM_URL=" + quote(" ".join(msg.args)),
                            timeout=23)
    else:
        # Not a URL: run the arguments as a sub-command and summarize the
        # textual content of its responses.
        cnt = ""
        for r in context.subtreat(context.subparse(msg, " ".join(msg.args))):
            if isinstance(r, Response):
                for i in range(len(r.messages) - 1, -1, -1):
                    if isinstance(r.messages[i], list):
                        for j in range(len(r.messages[i]) - 1, -1, -1):
                            cnt += r.messages[i][j] + "\n"
                    elif isinstance(r.messages[i], str):
                        cnt += r.messages[i] + "\n"
                    else:
                        cnt += str(r.messages) + "\n"
            elif isinstance(r, Text):
                cnt += r.message + "\n"
            else:
                cnt += str(r) + "\n"
        smmry = web.getJSON(URL, body="sm_api_input=" + quote(cnt),
                            timeout=23)
    if "sm_api_error" in smmry:
        if smmry["sm_api_error"] == 0:
            title = "Internal server problem (not your fault)"
        elif smmry["sm_api_error"] == 1:
            title = "Incorrect submission variables"
        elif smmry["sm_api_error"] == 2:
            title = "Intentional restriction (low credits?)"
        elif smmry["sm_api_error"] == 3:
            title = "Summarization error"
        else:
            title = "Unknown error"
        raise IMException(title + ": " + smmry['sm_api_message'].lower())
    if "keywords" in msg.kwargs:
        # Repaired a mangled string literal (raw newline inside the quotes,
        # a syntax error): keywords are joined with ", ".
        smmry["sm_api_content"] = ", ".join(smmry["sm_api_keyword_array"])
    if "sm_api_title" in smmry and smmry["sm_api_title"] != "":
        res.append_message(smmry["sm_api_content"],
                           title=smmry["sm_api_title"])
    else:
        res.append_message(smmry["sm_api_content"])
    return res
def info_repos(repo):
    """Search GitHub for repositories matching `repo`."""
    query = quote(repo)
    return web.getJSON("https://api.github.com/search/repositories?q=" + query)
def search(terms):
    """Look up the joined terms on Urban Dictionary."""
    term = quote(' '.join(terms))
    return web.getJSON(
        "http://api.urbandictionary.com/v0/define?term=%s" % term)
def virtual_radar(flight_call):
    """Yield aircraft entries matching the given flight callsign."""
    obj = web.getJSON(URL_API % quote(flight_call))
    if "acList" in obj:
        yield from obj["acList"]
def cmd_whois(msg):
    """Handle the whois command for the first given domain or IP address."""
    if not len(msg.args):
        raise IMException("Indiquer un domaine ou une IP à whois !")
    dom = msg.args[0]
    js = getJSON(URL_WHOIS % urllib.parse.quote(dom))
    if "ErrorMessage" in js:
        raise IMException(js["ErrorMessage"]["msg"])
    whois = js["WhoisRecord"]
    res = []
    # Build one response item per optional whois field;
    # \x03\x02 are IRC formatting control characters.
    if "registrarName" in whois:
        res.append("\x03\x02registered by\x03\x02 " + whois["registrarName"])
    if "domainAvailability" in whois:
        res.append(whois["domainAvailability"])
    if "contactEmail" in whois:
        res.append("\x03\x02contact email\x03\x02 " + whois["contactEmail"])
    if "audit" in whois:
        # Audit dates are wrapped in a {"$": value} envelope by the API.
        if "createdDate" in whois["audit"] and "$" in whois["audit"][
                "createdDate"]:
            res.append("\x03\x02created on\x03\x02 " +
                       whois["audit"]["createdDate"]["$"])
        if "updatedDate" in whois["audit"] and "$" in whois["audit"][
                "updatedDate"]:
            res.append("\x03\x02updated on\x03\x02 " +
                       whois["audit"]["updatedDate"]["$"])
    if "registryData" in whois:
        if "expiresDateNormalized" in whois["registryData"]:
            res.append("\x03\x02expire on\x03\x02 " +
                       whois["registryData"]["expiresDateNormalized"])
        if "registrant" in whois["registryData"]:
            res.append("\x03\x02registrant:\x03\x02 " +
                       whois_entityformat(whois["registryData"]["registrant"]))
        if "zoneContact" in whois["registryData"]:
            res.append(
                "\x03\x02zone contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["zoneContact"]))
        if "technicalContact" in whois["registryData"]:
            res.append(
                "\x03\x02technical contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["technicalContact"]))
        if "administrativeContact" in whois["registryData"]:
            res.append("\x03\x02administrative contact:\x03\x02 " +
                       whois_entityformat(whois["registryData"]
                                          ["administrativeContact"]))
        if "billingContact" in whois["registryData"]:
            res.append(
                "\x03\x02billing contact:\x03\x02 " +
                whois_entityformat(whois["registryData"]["billingContact"]))
    return Response(res, title=whois["domainName"], channel=msg.channel,
                    nomore="No more whois information")
def search_hosts(query):
    """Run a host search against the API, capping the response at 4 MiB."""
    qs = urllib.parse.urlencode({'query': query,
                                 'key': context.config["apikey"]})
    return web.getJSON(BASEURL + "host/search?" + qs, max_size=4194304)
def host_lookup(ip):
    """Look up a single host record by IP address."""
    url = (BASEURL + "host/" + urllib.parse.quote(ip) + "?" +
           urllib.parse.urlencode({'key': context.config["apikey"]}))
    return web.getJSON(url)
def cmd_smmry(msg):
    """Handle the smmry command: summarize a URL or a sub-command's output.

    API behaviour is tuned through message keyword arguments
    (length, break, keywords, *_avoid, ignore_length).
    """
    if not len(msg.args):
        global LAST_URLS
        # Fall back on the most recent URL seen on this channel.
        if msg.channel in LAST_URLS and len(LAST_URLS[msg.channel]) > 0:
            msg.args.append(LAST_URLS[msg.channel].pop())
        else:
            raise IMException("I have no more URL to sum up.")
    URL = URL_API
    if "length" in msg.kwargs:
        if int(msg.kwargs["length"]) > 0:
            URL += "&SM_LENGTH=" + msg.kwargs["length"]
        else:
            msg.kwargs["ignore_length"] = True
    if "break" in msg.kwargs:
        URL += "&SM_WITH_BREAK"
    if "ignore_length" in msg.kwargs:
        URL += "&SM_IGNORE_LENGTH"
    if "quote_avoid" in msg.kwargs:
        URL += "&SM_QUOTE_AVOID"
    if "question_avoid" in msg.kwargs:
        URL += "&SM_QUESTION_AVOID"
    if "exclamation_avoid" in msg.kwargs:
        URL += "&SM_EXCLAMATION_AVOID"
    if "keywords" in msg.kwargs and msg.kwargs["keywords"] is not None and int(
            msg.kwargs["keywords"]) > 0:
        URL += "&SM_KEYWORD_COUNT=" + msg.kwargs["keywords"]
    res = Response(channel=msg.channel)
    if web.isURL(" ".join(msg.args)):
        smmry = web.getJSON(URL + "&SM_URL=" + quote(" ".join(msg.args)),
                            timeout=23)
    else:
        # Not a URL: evaluate the arguments as a sub-command and collect the
        # textual content of every response it produced.
        cnt = ""
        for r in context.subtreat(context.subparse(msg, " ".join(msg.args))):
            if isinstance(r, Response):
                for i in range(len(r.messages) - 1, -1, -1):
                    if isinstance(r.messages[i], list):
                        for j in range(len(r.messages[i]) - 1, -1, -1):
                            cnt += r.messages[i][j] + "\n"
                    elif isinstance(r.messages[i], str):
                        cnt += r.messages[i] + "\n"
                    else:
                        cnt += str(r.messages) + "\n"
            elif isinstance(r, Text):
                cnt += r.message + "\n"
            else:
                cnt += str(r) + "\n"
        smmry = web.getJSON(URL, body="sm_api_input=" + quote(cnt),
                            timeout=23)
    if "sm_api_error" in smmry:
        if smmry["sm_api_error"] == 0:
            title = "Internal server problem (not your fault)"
        elif smmry["sm_api_error"] == 1:
            title = "Incorrect submission variables"
        elif smmry["sm_api_error"] == 2:
            title = "Intentional restriction (low credits?)"
        elif smmry["sm_api_error"] == 3:
            title = "Summarization error"
        else:
            title = "Unknown error"
        raise IMException(title + ": " + smmry['sm_api_message'].lower())
    if "keywords" in msg.kwargs:
        # Repaired a mangled string literal (raw newline inside the quotes,
        # a syntax error): keywords are joined with ", ".
        smmry["sm_api_content"] = ", ".join(smmry["sm_api_keyword_array"])
    if "sm_api_title" in smmry and smmry["sm_api_title"] != "":
        res.append_message(smmry["sm_api_content"],
                           title=smmry["sm_api_title"])
    else:
        res.append_message(smmry["sm_api_content"])
    return res