# CLI flags: optimizer settings plus integer on/off switches for the
# train and infer phases (argparse has no native bool flag here).
parser.add_argument("--lr", type=float, default=0.00005, help="learning rate")
parser.add_argument("--model_no", type=int, default=0, help="Model ID")
parser.add_argument("--train", type=int, default=0, help="0: Don't train, 1: train")
parser.add_argument("--infer", type=int, default=1, help="0: Don't infer, 1: Infer")
args = parser.parse_args()

if args.train == 1:
    net = train_and_fit(args)

if args.infer == 1:
    inferer = infer_from_trained(args, detect_entities=False)
    # Smoke-test the model on a sentence with pre-tagged [E1]/[E2] entities.
    sample = ("The surprise [E1]visit[/E1] caused a [E2]frenzy[/E2] "
              "on the already chaotic trading floor.")
    inferer.infer_sentence(sample, detect_entities=False)
    # Interactive loop: infer each typed sentence until the user quits.
    while True:
        sentence = input(
            "Type input sentence ('quit' or 'exit' to terminate):\n")
        if sentence.lower() in ('quit', 'exit'):
            break
        inferer.infer_sentence(sentence, detect_entities=False)
# CLI switches for the train / infer phases (1 = on, 0 = off).
parser.add_argument("--train", type=int, default=1, help="0: Don't train, 1: train")
parser.add_argument("--infer", type=int, default=1, help="0: Don't infer, 1: Infer")
args = parser.parse_args()

# The standard train/infer path is skipped entirely for the fewrel task.
standard_task = args.task != 'fewrel'

if args.train == 1 and standard_task:
    net = train_and_fit(args)

if args.infer == 1 and standard_task:
    inferer = infer_from_trained(args, detect_entities=True)
    # Demo 1: entities already marked with [E1]/[E2] tags.
    tagged_demo = ("The surprise [E1]visit[/E1] caused a [E2]frenzy[/E2] "
                   "on the already chaotic trading floor.")
    inferer.infer_sentence(tagged_demo, detect_entities=False)
    # Demo 2: raw sentence, let the model detect the entities itself.
    raw_demo = "After eating the chicken, he developed a sore throat the next morning."
    inferer.infer_sentence(raw_demo, detect_entities=True)
    # Interactive loop: infer each typed sentence until the user quits.
    while True:
        sentence = input(
            "Type input sentence ('quit' or 'exit' to terminate):\n")
        if sentence.lower() in ('quit', 'exit'):
            break
        inferer.infer_sentence(sentence, detect_entities=False)

if args.task == 'fewrel':
    fewrel = FewRel(args)
    meta_input, e1_e2_start, meta_labels, outputs = fewrel.evaluate()
def make_app(argv, debug=False):
    """Build and configure the relation-extraction Flask application.

    Parses CLI flags via ``Parser``, optionally trains a model, constructs
    the inference object on the app, and registers the two JSON POST
    endpoints (``/api/importtexoo`` and ``/api/importjson``).

    Args:
        argv: command-line argument list. Currently unused (flags are read
            with ``parse_known_args`` from the process arguments); kept for
            backward compatibility with callers that pass ``sys.argv[1:]``.
        debug: when True, run Flask in debug mode.

    Returns:
        The configured ``Flask`` application.
    """
    app = Flask(__name__)
    app.debug = debug

    parser = Parser().getParser()
    # parse_known_args: tolerate extra flags (e.g. server runner options)
    # instead of aborting like parse_args would.
    args, _ = parser.parse_known_args()

    if (args.train == 1) and (args.task != 'fewrel'):
        app.net = train_and_fit(args)
    if (args.infer == 1) and (args.task != 'fewrel'):
        app.inferer = infer_from_trained(args.model_path,
                                         detect_entities=True,
                                         args=args)
    if args.task == 'fewrel':
        fewrel = FewRel(args.model_path, args)
        meta_input, e1_e2_start, meta_labels, outputs = fewrel.evaluate()

    def _sentence_key(inputtype):
        """Return the JSON key holding the sentence text for a format."""
        if inputtype == "simplejson":
            return "sentext"
        if inputtype == "texoo":
            return "text"
        # FIX: previously an unknown inputtype left the key unbound and
        # surfaced later as a confusing NameError; fail fast instead.
        raise ValueError("unknown inputtype: %s" % inputtype)

    def find_best_prediction(out):
        """Return the (sentence, pred, prob) triple with the highest prob."""
        best_pred = max(out, key=itemgetter(2))
        return best_pred[0], best_pred[1], best_pred[2]

    def get_best_predictions(data, inputtype="simplejson"):
        """Annotate each record in-place with its single best prediction."""
        sen_name = _sentence_key(inputtype)
        for line in data:
            out = app.inferer.infer_sentence(line[sen_name],
                                             detect_entities=True)
            logger.info("out: " + str(out))
            if len(out) == 0:
                # FIX: max() on an empty prediction list raises ValueError;
                # mirror get_all_predictions and emit None fields instead.
                line["sentence"], line["pred"], line["prob"] = None, None, None
            else:
                line["sentence"], line["pred"], line[
                    "prob"] = find_best_prediction(out)
        return data

    def get_all_predictions(data, inputtype="simplejson"):
        """Expand each record into one output record per prediction."""
        new_data = []
        sen_name = _sentence_key(inputtype)
        for line in data:
            logger.info("sentence" + str(line[sen_name]))
            out = app.inferer.infer_sentence(line[sen_name],
                                             detect_entities=True)
            logger.info("out: " + str(out))
            if len(out) == 0:
                logger.info("test")
                line["sentence"], line["pred"], line["prob"] = None, None, None
                new_data.append(line)
            else:
                # One deep-copied output record per prediction so records
                # don't share mutable state.
                for pred in out:
                    logger.info("pred : " + str(pred))
                    newline = copy.deepcopy(line)
                    newline["sentence"], newline["pred"], newline[
                        "prob"] = pred[0], pred[1], pred[2]
                    logger.info("newline : " + str(newline))
                    new_data.append(newline)
        logger.info("new _DATA : \n" + str(new_data) + "\n")
        return new_data

    # Takes TeXoo JSON; ignores existing annotations and generates new ones.
    @app.route('/api/importtexoo', methods=['POST'])
    def get_input_importtexoo():
        print("request.data: ", request.data)
        logger.info("request.data:" + str(request.data))
        jsonInput = request.get_json(force=True)
        if jsonInput["options"]["returnAllPredictions"]:
            data = get_all_predictions(jsonInput["data"], "texoo")
        else:
            data = get_best_predictions(jsonInput["data"], "texoo")
        return make_result_json(data, "texoo")

    # Takes the simple JSON format ({"options": ..., "data": [{"sentext": ...}]}).
    @app.route('/api/importjson', methods=['POST'])
    def get_input_importjson():
        print("request.data: ", request.data)
        logger.info("request.data:" + str(request.data))
        jsonInput = request.get_json(force=True)
        if jsonInput["options"]["returnAllPredictions"]:
            data = get_all_predictions(jsonInput["data"])
        else:
            data = get_best_predictions(jsonInput["data"])
        return make_result_json(data)

    return app