def chat(kwargs):
    """Dispatch an incoming chat message to the matching bot command.

    :param kwargs: Parsed request parameters; ``kwargs['text'][0]`` holds
        the raw message and ``kwargs['user_name'][0]`` the author's pseudo.
    :return: A formatted response when the command yields a plain string,
        otherwise ``None`` (rich payloads are pushed through the API).
    """
    try:
        tokens = kwargs['text'][0].split(' ')
        # Command word is the second token when a trigger word precedes it,
        # otherwise the first (direct invocation).
        commande = tokens[1] if len(tokens) > 1 else tokens[0]
        commande = commande.lower()
        # Bug fix: join the remaining words with spaces — the original
        # "".join collapsed multi-word queries ("new york" -> "newyork").
        kwargs["query"] = " ".join(tokens[2:])
        # Keep an audit trail of who ran what.
        add_history(pseudo=kwargs["user_name"][0],
                    command="{0} {1}".format(commande, kwargs["query"]))
        if commande in commands:
            retour = commands[commande](kwargs)
            if retour != "" and retour is not None:
                if isinstance(retour, str):
                    return build_response(kwargs, retour)
                else:
                    # Rich (non-string) payloads cannot be returned inline;
                    # post them through the Mattermost incoming-webhook API.
                    callrest(domain=MATTERMOST_DOMAIN,
                             type="POST",
                             path=MATTERMOST_PATH,
                             params={"payload": json.dumps(retour)})
    except Exception:
        # Best-effort bot: a failing command must never crash the handler,
        # but keep the full traceback in the logs (logging.error(e) only
        # recorded the message).
        logging.exception("chat command failed")
def get_gyphy(keyword, md=True):
    """Search GIPHY for a random gif matching *keyword*.

    :param keyword: Search tag; an empty string asks GIPHY for a fully
        random gif.
    :param md: When True, wrap the URL in a Markdown image tag.
    :return: The gif URL (possibly Markdown-wrapped), or None on failure.
    """
    try:
        params = {"api_key": GIPHY_API_KEY, "tag": keyword}
        data = callrest(domain=GIPHY_URL,
                        ssl=True,
                        path=GIPHY_PATH,
                        params=params,
                        user_headers={"X-Mashape-Key": MASHAPE_KEY})[2]
        retour = json.loads(data)
        if len(retour['data']) == 0:  # pragma: no cover
            # No match: retry with an empty tag to get a random gif.
            # Bug fix: propagate *md* so the caller still receives the
            # requested format (the original always fell back to md=True).
            return get_gyphy("", md)
        if md:
            return "![image]({0})".format(retour['data']['image_original_url'])
        return retour['data']['image_original_url']
    except Exception as e:  # pragma: no cover
        print(e)
        return None
def search_arround_me(query):
    """Build a Markdown list of parks, forests and castles near *query*.

    Queries the Nominatim search API once per place type and links each
    hit to its position on Google Maps.
    """
    type_recherche_translate = {
        "park": "Parc",
        "forest": "Forêt",
        "castle": "Château"
    }
    retour_string = "Voilà la liste des lieux autours de vous :\n"
    no_result = True
    for rech in ("park", "forest", "castle"):
        params = {
            "accept-language": "fr",
            "format": "json",
            "limit": 5,
            "addressdetails": 1,
            # Nominatim "special phrase" syntax: [type] location.
            "q": "[{1}] {0}".format(query, rech)
        }
        raw = callrest(domain="nominatim.openstreetmap.org",
                       ssl=True,
                       params=params,
                       path="/search",
                       type="GET")[2]
        lignes = []
        for adress in json.loads(raw):
            try:
                address = adress["address"]
                # Pick the most specific locality available, falling back
                # to the postcode when no city/town/village is present.
                if "city" in address:  # pragma: no cover
                    city = address["city"]
                elif "town" in address:  # pragma: no cover
                    city = address["town"]
                elif "village" in address:  # pragma: no cover
                    city = address["village"]
                else:  # pragma: no cover
                    city = address["postcode"]
                # The first address component serves as the display name,
                # unless a component matching the searched type exists.
                first_element = list(address.values())[0]
                if rech in address:
                    first_element = address[rech]
                lignes.append(
                    "{0}, {1} [Voir](https://www.google.fr/maps/@{2},{3},18z)".
                    format(first_element, city, adress["lat"], adress["lon"]))
            except Exception as e:  # pragma: no cover
                raise (e)
        if lignes:
            no_result = False
            retour_string = "{0} \n {1} : ".format(
                retour_string, type_recherche_translate[rech])
            retour_string = retour_string + "\n - " + "\n - ".join(
                lignes) + " \n "
    if not no_result:
        return retour_string
    else:  # pragma: no cover
        return "Désolé, aucun résultat autour de votre position."
def get_reddit(subreddit):
    """Return the data payload of a random recent post from *subreddit*.

    On any failure an apology string is returned instead of a dict.
    """
    try:
        raw = callrest(domain="www.reddit.com",
                       port=443,
                       ssl=True,
                       path="/r/{0}/new/.json".format(subreddit),
                       params={})[2]
        posts = json.loads(raw).get("data").get("children")
        return random.choice(posts).get("data")
    except Exception as e:  # pragma: no cover
        print(e)
        return "Désolé, aucun résultat"
def get_redditlist(type_reddit="all"):
    """Pick a random subreddit from redditlist.com and fetch a post from it.

    Fetched pages are cached per listing type to avoid re-scraping
    redditlist.com on every call; any failure falls back to a fully
    random reddit pick.
    """
    cache_key = "redditlist_{0}".format(type_reddit)
    if cache_key not in cache:
        result = callrest(domain="redditlist.com",
                          type="GET",
                          path="/{0}".format(type_reddit),
                          params={})
        if result[0] != 200:  # pragma: no cover
            return get_reddit_random()
        cache[cache_key] = result[2]
    # Each listing entry carries the subreddit name in a data attribute.
    soup = BeautifulSoup(cache[cache_key], "html.parser")
    entries = soup.find_all("div", class_="listing-item")
    if not entries:  # pragma: no cover
        return get_reddit_random()
    subReddit = random.choice(entries).get("data-target-subreddit", "")
    return get_reddit(subReddit)
def cmd_aide(msg):
    """Look up the definition requested by the user on French Wikipedia.

    :param msg: Object describing the user's request; ``msg["query"]``
        holds the searched topic.
    """
    if not msg["query"]:
        # No topic given yet: flag the user/channel as awaiting an answer.
        mark_for_awaiting_response(username_or_channel(msg), "def")
        return "Sur quel sujet ?"
    try:
        wikipedia.set_lang("fr")
        # The wikipedia lib works poorly in French (e.g. "valentin" cannot
        # be loaded), so only its search is used here; the extract itself
        # is fetched straight from the MediaWiki API below.
        query = wikipedia.search(msg["query"],
                                 results=1).pop(0).replace(" ", "_")
        if query:
            domain = "fr.wikipedia.org"
            retour = callrest(domain=domain,
                              port="443",
                              ssl=True,
                              params={
                                  "format": "json",
                                  "action": "query",
                                  "prop": "extracts",
                                  "exintro": "",
                                  "explaintext": "",
                                  "titles": query
                              },
                              path="/w/api.php",
                              user_headers={"Accept-Charset": "utf-8"})
            retour = json.loads(retour[2])
            page_id = list(retour["query"]["pages"]).pop(0)
            # page id "-1" means the title was not found.
            if page_id == "-1":
                raise Exception("KO")
            page = retour["query"]["pages"][page_id]
            return "{0} \n\nhttps://{1}/?curid={2}".format(
                page.get("extract", ""), domain, page_id)
    except Exception:  # pragma: no cover
        if msg["query"]:
            return "Aucun résultat pour {0}".format(msg["query"])
def get_commitstrip(latest=False):
    """Fetch a CommitStrip image URL from the site's RSS feed.

    :param latest: When True, return the most recent strip instead of a
        random one.
    :return: The image URL, or an error message string on failure.
    """
    try:
        # Download the RSS feed listing the strips.
        flux = callrest(domain="www.commitstrip.com",
                        port="80",
                        path="/fr/feed/")[2]
        items = BeautifulSoup(flux, "html.parser").select("item")
        # Either the newest entry or a random one.
        choix = items[0] if latest else random.choice(items)
        contenu = choix.find("content:encoded").text
        # The strip itself is the first image of the embedded HTML.
        page = BeautifulSoup(contenu, "html.parser")
        return page.select("p > img")[0].attrs.get("src")
    except Exception as e:  # pragma: no cover
        return "Impossible de récupérer les CommitStrip. {}".format(e)
def cmd_top10(msg):
    """Return the top-10 reddit posts of the last hour as a Markdown list.

    :param msg: User request object (unused; kept for the command API).
    :return: One "title : url" bullet per post, joined with CRLF.
    :raises Exception: When the reddit call or JSON decoding fails.
    """
    return_values = []
    try:
        data = callrest(domain="www.reddit.com",
                        port=443,
                        ssl=True,
                        path="/top/.json",
                        params={
                            "limit": 10,
                            "sort": "top",
                            "t": "hour"
                        })[2]
        for element in json.loads(data).get("data").get("children"):
            try:
                return_values.append(
                    "{title} : {url}".format(**element["data"]))
            except (KeyError, TypeError):  # pragma: no cover
                # Skip malformed entries. Bug fix: the original bare
                # `except:` also swallowed KeyboardInterrupt/SystemExit.
                pass
        return "Top10 : \r\n- {0}".format("\r\n- ".join(return_values))
    except Exception as e:  # pragma: no cover
        # Chain the cause so the original traceback is preserved
        # (raise Exception(e) alone flattens it).
        raise Exception(e) from e