Code example #1
0
def summarise_url():
    """Scrape a web page and return its summary plus related Wikipedia links.

    Expects a JSON request body with:
        url (str): required — the page to scrape and summarise.
        length (int): optional summary length; defaults to 5.

    Returns:
        dict: ``{"output", "keywords", "recommended_articles"}`` on success,
        or ``{"error": ...}`` when the body/field is missing or scraping fails.
    """
    if not request.json:
        return {"error": "No json body found in request"}

    if "url" not in request.json:
        return {"error": "field url not found. Expected string"}

    url = request.json["url"]

    # Optional summary length — single lookup with a default instead of
    # the check-then-index (LBYL) pattern.
    length = request.json.get("length", 5)

    scrapped_data = data_scrapping(url)
    if "error" in scrapped_data:
        return {"error": "Website does not allow scrapping"}

    output = prediction(scrapped_data["output"], length)
    # Detect the summary's language so Wikipedia queries match it.
    lang = get_lang(output[0])
    wikipedia.set_lang(lang)
    keywords = word_extraction(str(output), lang)
    # word_extraction items are indexed at [1] — presumably (score, word)
    # pairs; keep only the word part. TODO confirm against word_extraction.
    k = [kw[1] for kw in keywords]
    recommended_articles = search_on_wikipedia(keywords, lang)

    return {
        "output": output,
        "keywords": k,
        "recommended_articles": recommended_articles,
    }
Code example #2
0
def summary():
    """Summarise raw text supplied in the request body.

    Expects a JSON request body with:
        text (str): required — the document to summarise.
        length (int): optional summary length; defaults to 5.

    Returns:
        dict: ``{"output", "keywords", "recommended_articles"}`` on success,
        or ``{"error": ...}`` when the body or ``text`` field is missing.
    """
    if not request.json:
        return {"error": "No json body found in request"}

    if "text" not in request.json:
        return {"error": "field text not found. Expected string"}

    # Optional summary length — single lookup with a default instead of
    # the check-then-index (LBYL) pattern.
    length = request.json.get("length", 5)

    doc = request.json['text']

    output = prediction(doc, length)
    # Detect the summary's language so Wikipedia queries match it.
    lang = get_lang(output[0])
    wikipedia.set_lang(lang)
    keywords = word_extraction(str(output), lang)
    # word_extraction items are indexed at [1] — presumably (score, word)
    # pairs; keep only the word part. TODO confirm against word_extraction.
    k = [kw[1] for kw in keywords]
    recommended_articles = search_on_wikipedia(keywords, lang)

    return {
        "output": output,
        "keywords": k,
        "recommended_articles": recommended_articles,
    }
Code example #3
0
File: flask_solo.py  Project: kabeer11000/ENCY-AI
def summarise_url():
    """Summarise the page at the JSON body's ``url`` and suggest articles.

    The body must contain ``url`` (str); ``length`` (int, default 5)
    tunes the summary size. Returns the summary, its keywords, and
    recommended Wikipedia articles, or an ``error`` dict on failure.
    """
    body = request.json
    if not body:
        return {"error": "No json body found in request"}
    if "url" not in body:
        return {"error": "field url not found. Expected string"}

    target = body["url"]
    summary_length = body.get("length", 5)

    scraped = data_scrapping(target)
    if "error" in scraped:
        return {"error": "Website does not allow scrapping"}

    summary_text = prediction(scraped["output"], summary_length)
    extracted = word_extraction(str(summary_text))
    articles = search_on_wikipedia(extracted)

    return {
        "output": summary_text,
        "keywords": extracted,
        "recommended_articles": articles,
    }
Code example #4
0
def get_ka():
    """Extract keywords and recommended Wikipedia articles from ``text``.

    Expects a JSON request body with:
        text (str): required — the document to analyse.

    Returns:
        dict: ``{"keywords", "recommended_articles"}`` on success, or an
        ``{"error": ...}`` dict when the body or ``text`` field is missing.
    """
    if not request.json:
        return {"error": "No json body found in request"}

    if "text" not in request.json:
        return {"error": "field text not found. Expected string"}

    doc = request.json['text']
    # Detect the document's language so Wikipedia queries match it.
    lang = get_lang(doc)
    wikipedia.set_lang(lang)
    keywords = word_extraction(str(doc), lang)
    # word_extraction items are indexed at [1] — presumably (score, word)
    # pairs; keep only the word part. TODO confirm against word_extraction.
    k = [kw[1] for kw in keywords]
    recommended_articles = search_on_wikipedia(keywords, lang)

    return {"keywords": k, "recommended_articles": recommended_articles}
Code example #5
0
File: flask_solo.py  Project: kabeer11000/ENCY-AI
def summary():
    """Summarise the JSON body's ``text`` and suggest Wikipedia articles.

    The body must contain ``text`` (str); ``length`` (int, default 5)
    tunes the summary size. Returns the summary, its keywords, and
    recommended articles, or an ``error`` dict on a bad request.
    """
    body = request.json
    if not body:
        return {"error": "No json body found in request"}
    if "text" not in body:
        return {"error": "field text not found. Expected string"}

    summary_length = body.get("length", 5)
    document = body['text']

    summary_text = prediction(document, summary_length)
    extracted = word_extraction(summary_text)
    articles = search_on_wikipedia(extracted)

    return {
        "output": summary_text,
        "keywords": extracted,
        "recommended_articles": articles,
    }