def summarise_url(): if not request.json: return { "error": "No json body found in request" } if "url" not in request.json: return { "error": "field url not found. Expected string" } url = request.json["url"] length = 5 if "length" in request.json: length = request.json['length'] scrapped_data = data_scrapping(url) if "error" in scrapped_data: return {"error": "Website does not allow scrapping"} k = [] output = prediction(scrapped_data["output"], length) #length lang = get_lang(output[0]) wikipedia.set_lang(lang) keywords = word_extraction(str(output), lang) #TODO : get language for i in keywords: k.append(i[1]) recommended_articles = search_on_wikipedia(keywords, lang) out = { "output": output, "keywords": k, "recommended_articles": recommended_articles } return out
def summarise_url(): if not request.json: return {"error": "No json body found in request"} if "url" not in request.json: return {"error": "field url not found. Expected string"} url = request.json["url"] length = 5 if "length" in request.json: length = request.json['length'] scrapped_data = data_scrapping(url) if "error" in scrapped_data: return {"error": "Website does not allow scrapping"} output = prediction(scrapped_data["output"], length) #length keywords = word_extraction(str(output)) recommended_articles = search_on_wikipedia(keywords) out = { "output": output, "keywords": keywords, "recommended_articles": recommended_articles } return out
def summary(): if not request.json: return { "error": "No json body found in request" } if "text" not in request.json: return { "error": "field text not found. Expected string" } length = 5 if "length" in request.json: length = request.json['length'] doc = request.json['text'] k = [] output = prediction(doc, length) lang = get_lang(output[0]) wikipedia.set_lang(lang) keywords = word_extraction(str(output), lang) # TODO : get language for i in keywords: k.append(i[1]) recommended_articles = search_on_wikipedia(keywords, lang) out = { "output": output, "keywords": k, "recommended_articles": recommended_articles } return out
def aiTips():
    # Validate the JSON body: a "word" string is required. Underscores are
    # treated as spaces (e.g. "machine_learning" -> "machine learning").
    if not request.json:
        return {"error": "No JSON body found in request"}
    if "word" not in request.json:
        return {"error": "Field 'word' not found. Expected a string"}
    word = str(request.json["word"]).replace("_", " ")

    out = {}
    if len(word.split()) == 1:
        # Single word: try a dictionary definition first; fall back to plain
        # language detection when no definition is available.
        try:
            definition, lang = get_def(word)
            wikipedia.set_lang(lang)
        except Exception:
            definition = ""
            lang = get_lang(word)
        out["definition"] = definition
    else:
        lang = get_lang(word)

    # Turn Wikipedia search hits into article URLs for the detected language.
    recommended_articles = wikipedia.search(word)
    websites_url = [
        "https://" + lang + ".wikipedia.org/wiki/" + article.replace(" ", "_")
        for article in recommended_articles
    ]

    # Summarise the first matching article and extract keywords from it.
    k = []
    try:
        first = recommended_articles[0].replace(" ", "_")
        url = "https://" + lang + ".wikipedia.org/wiki/" + first
        scrapped_data = data_scrapping(url)
        if "error" in scrapped_data:
            return {"error": "Website does not allow scraping"}
        output = prediction(scrapped_data["output"], 3)
        # word_extraction returns pairs; index 1 holds the keyword string.
        keywords = word_extraction(str(output), lang)
        k = [i[1] for i in keywords]
    except Exception:
        output = None

    out[word] = {
        "output": output,
        "keywords": k,
        "recommended_articles": websites_url,
    }
    return out
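
# A hypothetical client sketch for aiTips; it assumes the view above is
# registered at POST /aiTips (not shown here). Underscores in "word" are
# converted to spaces server-side; a single word also yields a "definition".
def _example_aiTips_call():
    import requests

    resp = requests.post(
        "http://localhost:5000/aiTips",
        json={"word": "machine_learning"},
    )
    print(resp.json())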
def summary(): if not request.json: return {"error": "No json body found in request"} if "text" not in request.json: return {"error": "field text not found. Expected string"} length = 5 if "length" in request.json: length = request.json['length'] doc = request.json['text'] output = prediction(doc, length) keywords = word_extraction(output) recommended_articles = search_on_wikipedia(keywords) out = { "output": output, "keywords": keywords, "recommended_articles": recommended_articles } return out