def mention():
    """Resolve a mention string (``?mention=...``) to candidate entities.

    Reads the ``mention`` query parameter, looks up candidate entities via
    :func:`mention2entity`, and returns them as a JSON string (Chinese text
    kept as-is via ``ensure_ascii=False``). Returns a usage hint when the
    parameter is missing or empty.
    """
    print("请求方式为:", request.method)
    # NOTE: renamed from `mention` — the original local shadowed the
    # function's own name. `request.args.get` is equivalent to
    # `request.args.to_dict().get` (both yield the first value) without
    # copying the whole MultiDict.
    query = request.args.get('mention', '')
    if query == '':
        return "你可能忘记提问了, 试试/mention?mention=泡泡糖"
    print(query)
    answers = mention2entity(query)
    print(answers)
    return json.dumps(answers, ensure_ascii=False)
def show_demo():
    """Dispatch a demo query encoded in the ``entity`` query parameter.

    The parameter uses a ``"<command>:<args>"`` convention; each command
    renders a different visualization after dumping its data to a JSON file
    under ``static/data/`` that the template reads:

    - ``ad_social:<node>,<level>``     — Advogato neighborhood graph
    - ``mention:<text>``               — mention-to-entity candidates
    - ``ad_trust:<src>,<dst>,<cutoff>``— trust paths + trust value
    - ``nlp:<sentence>``               — lexical analysis
    - ``emotion:<sentence>``           — sentiment analysis
    - ``weibo:<username>``             — Weibo user graph
    - ``network:<entity>``             — entity attribute graph (Baike)
    - ``fb_social:<id>,<level>``       — Freebase neighborhood graph
    - ``fb_relation:<src>,<dst>``      — Freebase relation trust
    """
    print("请求方式为:", request.method)
    entity = request.args.to_dict().get('entity', '')
    if entity == '':
        return "请确定要查询的功能!"
    elif "ad_social:" in entity:
        print(entity)
        line = entity.split(':')
        params = line[1].split(",")
        contents = advogato_data_KG_source('source', params[0], int(params[1]))
        print(contents)
        with open("static/data/entity_relationships.json", 'w') as data:
            json.dump(contents, data)
        name = {}
        name['entity'] = str(params[0])
        name['level'] = str(params[1])
        print(name)
        return render_template('entity_relationships.html', contents=name)
    elif "mention:" in entity:
        print(entity)
        line = entity.strip().split(":")
        mention = line[1]
        contents = mention2entity(mention)
        print(contents)
        with open("static/data/entity_mention.json", 'w') as data:
            json.dump(contents, data)
        urls = {}
        urls['mention'] = mention
        return render_template('entity_mention.html', contents=urls)
    elif "ad_trust:" in entity:
        print(entity)
        line = entity.strip().split(":")
        entities = line[1].split(",")
        source = entities[0]
        target = entities[1]
        cutoff = entities[2]
        print(source, target, cutoff)
        contents = get_paths(source, target, cutoff, '')
        with open("static/data/entity_entity_paths.json", 'w') as data:
            json.dump(contents, data)
        paths = {}
        paths['source'] = str(source)
        paths['target'] = str(target)
        paths['cutoff'] = str(cutoff)
        print(contents)
        # Direct neighbors use the 1-hop trust value; no path means zero
        # trust; otherwise aggregate over the found paths.
        if int(cutoff) == 1:
            paths['trust'] = get_trust_value_1(source, target)
        elif len(contents['links']) == 0:
            paths['trust'] = 0.0
        else:
            paths['trust'] = get_trust_value(source, target, cutoff)
        print(paths)
        return render_template('entity_entity_paths.html', contents=paths)
    elif "nlp:" in entity:
        print(entity)
        line = entity.strip().split(":")
        statement = line[1]
        contents = lexer(statement)
        with open("static/data/nlp.json", 'w') as data:
            json.dump(contents, data)
        return render_template('nlp.html')
    elif 'emotion' in entity:
        print(entity)
        line = entity.strip().split(":")
        statement = line[1]
        mydata = emotion(statement)
        return render_template('sentiment.html', contents=mydata)
    elif 'weibo' in entity:
        line = entity.strip().split(":")
        username = line[1]
        # NOTE(review): the original source was redacted here ("******"),
        # leaving `user_id` undefined. Reconstructed as a username -> id
        # lookup between the two prints — confirm the real helper name
        # against the repository before relying on this branch.
        print("username:", username)
        user_id = get_user_id(username)
        print("user_id:", user_id)
        if user_id is None:
            return "请确保输入的用户名正确无误~"
        contents, username = generate_weibo_user_graph(user_id=int(user_id))
        print(user_id)
        if contents == '':
            return "抱歉,因你过于帅气,我自己都凌乱了。"
        with open("static/data/weibo.json", 'w') as data:
            json.dump(contents, data)
        mydata = {}
        mydata['user_id'] = user_id
        mydata['username'] = username
        return render_template('weibo_graph.html', contents=mydata)
    elif 'network' in entity:
        line = entity.strip().split(":")
        entity = line[1]
        # Canonicalize a couple of well-known aliases before lookup.
        if entity == "特朗普":
            entity = "唐纳德·特朗普"
        if entity == '李小勇':
            entity = '李小勇[北京邮电大学教授]'
        file_name = "entity_attribution_{}.json"
        file_path = "static/data/entity_attr/" + file_name.format(entity)
        result = os.path.exists(file_path)
        baidu = {}
        baike_url = get_baike_url(entity)  # Baidu Baike URL for the entity
        baidu['entity'] = entity
        # The template is pointed at the server-side links file, not the
        # Baike URL itself (the original also assigned baike_url first,
        # but that value was immediately overwritten — dead store removed).
        baidu['url'] = ("http://47.105.58.24:8989/" + "static/data/links/"
                        + file_name.format(entity))
        if result:
            # Cached attribute file already exists — skip the crawl.
            print("已经有这个文件了")
            return render_template('entity_attribution.html', contents=baidu)
        contents, baike_url_2 = KG_View_2(entity)
        if '李小勇' in entity:
            contents = generate_lxy_contents()
        if len(contents) == 0:
            return "输入的实体暂不支持,请检查合法性后重新输入。"
        # Map back to the display alias, then restore for file naming.
        if entity == "唐纳德·特朗普":
            entity = "特朗普"
        related_urls = []
        # related_urls = get_related_urls(entity)
        if entity == "特朗普":
            entity = "唐纳德·特朗普"
        related_urls.append(baike_url)
        print("baike_url:", baike_url)
        with open("static/data/links/" + file_name.format(entity), 'w') as data:
            for url in related_urls:
                data.write(url + "\n")
            # (original called data.close() here — redundant inside `with`)
        with open("static/data/entity_attr/" + file_name.format(entity), 'w') as data:
            json.dump(contents, data)
        return render_template('entity_attribution.html', contents=baidu)
    elif "fb_social" in entity:
        print(entity)
        line = entity.split(':')
        params = line[1].split(",")
        entity_id = params[0]
        entity_name = get_entity_name(entity_id)
        contents = advogato_data_KG_source('freebase', entity_name, int(params[1]))
        print(contents)
        with open("static/data/entity_relationships.json", 'w') as data:
            json.dump(contents, data)
        name = {}
        name['entity'] = str(entity_name)
        name['level'] = str(params[1])
        print(name)
        return render_template('entity_relationships.html', contents=name)
    elif "fb_relation" in entity:
        print(entity)
        line = entity.strip().split(":")
        entities = line[1].split(",")
        source = entities[0]
        target = entities[1]
        print(source, target)
        # Freebase relations are always queried at cutoff 1.
        contents = get_paths(source, target, 1, "freebase")
        infos = get_freebase_info(source, target)
        with open("static/data/entity_entity_paths.json", 'w') as data:
            json.dump(contents, data)
        paths = {}
        paths['source'] = str(source)
        paths['target'] = str(target)
        paths['cutoff'] = 1
        paths['trust'] = round(float(infos.get('trust', '0')), 3)
        paths['max_trust'] = infos.get('max_trust_relation', '')
        print(paths)
        return render_template('freebase_relation_trust.html', contents=paths)
    else:
        return "输入格式不合法,请检查后重新输入"
def show_demo():
    """Dispatch a demo query encoded in the ``entity`` query parameter.

    NOTE(review): this file defines ``show_demo`` twice; at import time this
    definition overwrites the earlier one (and duplicate Flask endpoint
    names would raise at registration). One of the two should be renamed or
    removed — kept as-is here because the route decorator is not visible.

    Commands (``"<command>:<args>"``):

    - ``source:<node>,<level>`` — Advogato neighborhood graph
    - ``target:<node>``         — Advogato target view
    - ``mention:<text>``        — mention-to-entity candidates
    - ``attr:<user>``           — PageRank attributes
    - ``path:<src>,<dst>,<cutoff>`` — trust paths between entities
    - ``nlp:<sentence>``        — lexical analysis
    - ``emotion:<sentence>``    — sentiment analysis
    - ``weibo:<user_id>``       — Weibo user graph
    - anything else             — entity attribute graph (Baike)
    """
    print("请求方式为:", request.method)
    entity = request.args.to_dict().get('entity', '')
    if entity == '':
        return render_template('demo.html')
    elif "source:" in entity:
        print(entity)
        line = entity.split(':')
        params = line[1].split(",")
        contents = advogato_data_KG_source('source', params[0], int(params[1]))
        print(contents)
        with open("static/data/entity_relationships.json", 'w') as data:
            json.dump(contents, data)
        name = {}
        name['entity'] = str(params[0])
        name['level'] = str(params[1])
        print(name)
        return render_template('entity_relationships.html', contents=name)
    elif "target:" in entity:
        print(entity)
        line = entity.split(':')
        contents = advogato_data_KG_target('target', line[1])
        print(contents)
        return render_template('index.html', contents=contents)
    elif "mention:" in entity:
        print(entity)
        line = entity.strip().split(":")
        mention = line[1]
        contents = mention2entity(mention)
        print(contents)
        with open("static/data/entity_mention.json", 'w') as data:
            json.dump(contents, data)
        urls = {}
        urls['mention'] = mention
        return render_template('entity_mention.html', contents=urls)
    elif "attr:" in entity:
        print(entity)
        line = entity.strip().split(":")
        user = line[1]
        contents = get_page_rank(user)
        print(contents)
        return render_template('index.html', contents=contents)
    elif "path:" in entity:
        print(entity)
        line = entity.strip().split(":")
        entities = line[1].split(",")
        source = entities[0]
        target = entities[1]
        cutoff = entities[2]
        print(source, target, cutoff)
        # NOTE(review): `cutoff` is still a str here, while the sibling
        # handler passes it through int() before comparisons — verify
        # get_paths accepts a string cutoff.
        contents = get_paths(source, target, cutoff)
        with open("static/data/entity_entity_paths.json", 'w') as data:
            json.dump(contents, data)
        paths = {}
        paths['source'] = str(source)
        paths['target'] = str(target)
        paths['cutoff'] = str(cutoff)
        print(paths)
        return render_template('entity_entity_paths.html', contents=paths)
    elif "nlp:" in entity:
        print(entity)
        line = entity.strip().split(":")
        statement = line[1]
        # (a large block of commented-out Baidu-API graph-building code was
        # removed here — lexer() is the live implementation)
        contents = lexer(statement)
        with open("static/data/nlp.json", 'w') as data:
            json.dump(contents, data)
        return render_template('nlp.html')
    elif 'emotion' in entity:
        print(entity)
        line = entity.strip().split(":")
        statement = line[1]
        # (commented-out per-model sentiment aggregation removed — emotion()
        # is the live implementation)
        mydata = emotion(statement)
        return render_template('sentiment.html', contents=mydata)
    elif 'weibo' in entity:
        print(entity)
        line = entity.strip().split(":")
        user_id = line[1]
        # NOTE(review): the sibling handler converts user_id with int()
        # before this call — confirm whether generate_weibo_user_graph
        # accepts a string id.
        contents, username = generate_weibo_user_graph(user_id=user_id)
        if contents == '':
            return "抱歉,因你过于帅气,我自己都凌乱了。"
        with open("static/data/weibo.json", 'w') as data:
            json.dump(contents, data)
        mydata = {}
        mydata['user_id'] = user_id
        mydata['username'] = username
        return render_template('weibo_graph.html', contents=mydata)
    else:
        # Fallback: treat the raw parameter as an entity name and render
        # its attribute graph, caching the crawl result on disk.
        file_name = "entity_attribution_{}.json"
        file_path = "static/data/entity_attr/" + file_name.format(entity)
        result = os.path.exists(file_path)
        baidu = {}
        baike_url = get_baike_url(entity)  # Baidu Baike URL for the entity
        baidu['url'] = baike_url
        baidu['entity'] = entity
        if result:
            # Cached attribute file already exists — skip the crawl.
            print("已经有这个文件了")
            return render_template('entity_attribution.html', contents=baidu)
        contents, baike_url = KG_View_2(entity)
        print(contents)
        print(baike_url)
        with open("static/data/entity_attr/" + file_name.format(entity), 'w') as data:
            json.dump(contents, data)
        return render_template('entity_attribution.html', contents=baidu)