def launch_stop():
    # Tell the SmallNabz player to stop the current playback.
    try:
        data = callrest(domain=SMALLNABZ_DOMAIN, port=SMALLNABZ_PORT, path="/stop", params={})[2]
        return ""
    except Exception as e:
        print(e)
        return "Erreur"

def launch_chuck(params):
    # Fetch a random Chuck Norris fact and unescape the HTML entities it contains.
    try:
        data = callrest(domain=CHUCK_DOMAIN, path=CHUCK_PATH)[2]
        return html.unescape(json.loads(data)[0].get("fact"))
    except Exception as e:
        print(e)
        return "Erreur"

def launch_play(params):
    # Forward the play request (and its parameters) to the SmallNabz player.
    try:
        data = callrest(domain=SMALLNABZ_DOMAIN, port=SMALLNABZ_PORT, path="/play", params=params)[2]
        return ""
    except Exception as e:
        print(e)
        return "Erreur"

def get_reddit(subreddit):
    # Pick a random post from the "new" listing of the given subreddit.
    try:
        data = callrest(domain="www.reddit.com", port=443, ssl=True,
                        path="/r/{0}/new/.json".format(subreddit), params={})[2]
        return random.choice(json.loads(data).get("data").get("children")).get("data")
    except Exception as e:
        print(e)
        return ("Oups", "Rien... " + subreddit)

def chat(kwargs):
    # Dispatcher: the first word after the bot mention is looked up in the
    # "commands" mapping; unknown commands fall back to a Giphy search.
    try:
        commande = kwargs['text'][0].split(' ')[1]
        if commande in commands:
            retour = commands[commande](kwargs)
            if retour != "" and retour is not None:
                if type(retour) is str:
                    return build_response(kwargs, retour)
                else:
                    # A rich message cannot be returned directly, so post it through the API instead.
                    callrest(domain=MATTERMOST_DOMAIN, type="POST", path=MATTERMOST_PATH,
                             params={"payload": json.dumps(retour)})
        else:
            return build_response(kwargs, giphy.get_gyphy("".join(kwargs["text"][0].split(' ')[1:])))
    except Exception as e:
        print(e)

def get_madame():
    # Scrape the featured photo from the Madame page.
    try:
        data = callrest(domain=MADAME_URL, port="80", path=MADAME_PATH,
                        user_headers={"Accept-Charset": "utf-8"})[2]
        soup = BeautifulSoup(data, "html.parser")
        image = soup.find_all("div", class_="photo")[0].find("img")['src']
        return return_md(image)
    except Exception as e:
        return ("Oups", "Rien... ")

def get_fuckmylife():
    # Grab the text of the most recent post on the FML page.
    try:
        data = callrest(domain=FML_URL, port="80", path=FML_PATH,
                        user_headers={"Accept-Charset": "utf-8"})[2]
        soup = BeautifulSoup(data, "html.parser")
        texte = soup.select("div.post.article")[0].p.text
        return return_md(texte)
    except Exception as e:
        print(e)
        return ("Oups", "Rien... ")

def get_joieducode():
    # Scrape the latest "Les joies du code" post: its title (tagging @stagiaire) and its image.
    try:
        data = callrest(domain=JOIESDUCODE_URL, port="80", path=JOIESDUCODE_PATH,
                        user_headers={"Accept-Charset": "utf-8"})[2]
        soup = BeautifulSoup(data, "html.parser")
        titre = soup.find_all("h1")[0].string.replace("stagiaire", "@stagiaire")
        image = soup.find_all("div", class_="blog-post-content")[0].find("img")['src']
        return return_md(titre.strip(), image)
    except Exception as e:
        print(e)
        return ("Oups", "Rien... ")

def get_lesjoiesdusysadmin():
    # Scrape the latest "Les joies du sysadmin" post: its title and its image.
    try:
        data = callrest(domain=LESJOIESDUSYSADMIN_URL, port="80", path=LESJOIESDUSYSADMIN_PATH,
                        user_headers={"Accept-Charset": "utf-8"})[2]
        soup = BeautifulSoup(data, "html.parser")
        titre = soup.select("div.content")[0].h3.span.text
        image = soup.find_all("div", class_="text")[0].find("img")['src']
        return return_md(titre.strip(), image)
    except Exception as e:
        print(e)
        return ("Oups", "Rien... ")

def get_gyphy(keyword):
    # Ask Giphy for a random GIF matching the keyword; retry with an empty tag when nothing matches.
    try:
        params = {}
        params['api_key'] = GIPHY_API_KEY
        params['tag'] = keyword
        data = callrest(domain=GIPHY_URL, ssl=True, path=GIPHY_PATH, params=params,
                        user_headers={"X-Mashape-Key": MASHAPE_KEY})[2]
        retour = json.loads(data)
        if len(retour['data']) == 0:
            return get_gyphy("")
        return return_md(retour['data']['image_original_url'])
    except Exception as e:
        print("Erreur ! {0}".format(e))
        return ("Oups", "Rien... ")

def cmd_top10(msg):
    # Build a bulleted list of the top 10 Reddit posts of the last hour.
    return_values = []
    try:
        data = callrest(domain="www.reddit.com", port=443, ssl=True, path="/top/.json",
                        params={"limit": 10, "sort": "top", "t": "hour"})[2]
        data = json.loads(data).get("data").get("children")
        for element in data:
            try:
                return_values.append("{title} : {url}".format(**element["data"]))
            except Exception:
                pass
        return "Top10 : \r\n- {0}".format("\r\n- ".join(return_values))
    except Exception as e:
        raise Exception(e)

def get_redditlist(type_reddit="all"):
    # Pick a random subreddit from redditlist.com (page cached) and fetch a post from it;
    # fall back to a fully random subreddit when the listing is unavailable.
    cache_key = "redditlist_{0}".format(type_reddit)
    if cache_key not in cache:
        result = callrest(domain="redditlist.com", type="GET", path="/{0}".format(type_reddit), params={})
        if result[0] == 200:
            cache[cache_key] = result[2]
        else:
            return get_reddit_random()
    soup = BeautifulSoup(cache[cache_key], "html.parser")
    links = soup.find_all("div", class_="listing-item")
    if len(links) > 0:
        subReddit = random.choice(links).get("data-target-subreddit", "")
        return get_reddit(subReddit)
    else:
        return get_reddit_random()

def aurevoir():
    # Post a goodbye message (with a YouTube link) to Mattermost.
    params = {"username": PSEUDO,
              "text": "Au revoir {0}".format("https://www.youtube.com/watch?v=uIMBjES4B4g")}
    data = callrest(domain=MATTERMOST_DOMAIN, type="POST", path=MATTERMOST_PATH,
                    params={"payload": json.dumps(params)})

def welcome():
    # Announce that the bot is up by posting a green "System Ready !" attachment to Mattermost.
    params = {"username": PSEUDO,
              "attachments": [{"color": "#3c901a", "title": PSEUDO, "text": "System Ready !"}]}
    data = callrest(domain=MATTERMOST_DOMAIN, type="POST", path=MATTERMOST_PATH,
                    params={"payload": json.dumps(params)})

def get_jenkins_data(path):
    # Query Jenkins at the given path; return an empty JSON object string on any failure.
    try:
        return callrest(domain=JENKINS_DOMAIN, port=JENKIN_PORT, path=path, params={})[2]
    except Exception:
        return "{}"