Example #1
    def __init__(self, model_file, sess):
        self.sess = sess
        if not model_file:
            self.bert_model = None
        else:
            log_server.logging('>>>>>>>> Load Model {}'.format(model_file))
            # Load the TF1.x SavedModel with the SERVING tag into the shared session
            self.bert_model = tf.saved_model.loader.load(
                self.sess, [tf.saved_model.tag_constants.SERVING], model_file)
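
A minimal usage sketch for the constructor above. The wrapper class name (BertScorer) and the model directory path are hypothetical; only the TF1.x session and SavedModel-loading calls come from the snippet itself.

import tensorflow as tf

sess = tf.Session()                                     # TF1.x session shared with the predictor
scorer = BertScorer('/path/to/saved_model_dir', sess)   # hypothetical class name and path
# Inference then runs through sess.run(...) against the graph's input/output
# tensors, whose names depend on how the SavedModel was exported.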
Example #2
    def __init__(self, model_file, tokenizer_file, max_essay_len):
        if not model_file:
            self.keras_model = None
        else:
            log_server.logging('>>>>>>>> Load Model {}'.format(model_file))
            self.keras_model = load_model(model_file)

        log_server.logging('>>>>>>>> Load Tokenizer {}'.format(tokenizer_file))
        # Restore the fitted tokenizer; a context manager ensures the file handle is closed
        with open(tokenizer_file, 'rb') as f:
            self.tokenizer = pickle.load(f)
        self.max_essay_length = max_essay_len
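
The constructor above only loads artifacts. The sketch below shows how the pickled tokenizer and max_essay_length would typically be used to build model input, assuming the pickled object is a Keras Tokenizer; the helper name prepare_input is hypothetical.

from keras.preprocessing.sequence import pad_sequences

def prepare_input(self, essay_text):
    # Convert one essay to an integer sequence and pad/truncate it to the fixed
    # length the Keras model expects.
    seqs = self.tokenizer.texts_to_sequences([essay_text])
    return pad_sequences(seqs, maxlen=self.max_essay_length)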
Example #3
    def infer(self, data, grade):
        """
        机器预测分数接口
        :param data: 作文
        :param grade: 学生年级
        :return: 机器预测分数
        """
        if data is None:
            return 0

        result = self.models[grade].predict(data)
        # Normalize the raw prediction by the maximum score for the grade band
        if '初' in grade:      # junior high school grades
            result = result / cfg.total_score_junior
        elif '高' in grade:    # senior high school grades
            result = result / cfg.total_score_senior
        else:
            log_server.logging('>>>>>>>> Invalid Grade !!!!!!!!!!!')
        return result
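
A hypothetical call to infer(); the instance name scorer and the features variable are placeholders, and the returned value is the raw prediction divided by the configured total score for the grade band.

normalized = scorer.infer(features, '初二')   # hypothetical instance and input
print('normalized score: {:.3f}'.format(normalized))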
Example #4
    def __init__(self):
        self.grade_list = ["初一", "初二", "初三", "初四", "高一", "高二", "高三"]
        self.model_map = {
            '初一': JuniorOnePredictor,
            '初二': JuniorTwoPredictor,
            '初三': JuniorThreePredictor,
            '初四': JuniorThreePredictor,  # grade 初四 reuses the junior-three model
            '高一': SeniorOneoPredictor,
            '高二': SeniorTwoPredictor,
            '高三': SeniorThreePredictor
        }
        self.models = dict()
        load_model_start = time.time()
        # Instantiate one predictor per grade so every model is loaded at startup
        for grade in self.grade_list:
            self.models[grade] = self.model_map[grade]()

        load_model_end = time.time()
        load_model_total = load_model_end - load_model_start
        log_server.logging(
            '>>>>>>>> Load Model Total Time: {}'.format(load_model_total))
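
A usage sketch for the loader above. The class name ModelManager is an assumption, and infer() from Example #3 is presumed to live on the same class.

manager = ModelManager()                        # constructs all seven per-grade predictors up front
score = manager.infer(essay_features, '高二')   # dispatch to the matching grade model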
Example #5
def AES_post():
    log_server.logging('============AES Begin!===========')
    content = request.values.get('post_content')
    grade = request.values.get('grade')

    if grade in grade_dict:
        start = time.time()
        AD = AdvancedWords()

        # Model scoring (predict holds the machine-assigned score)
        try:
            # TODO: predict = model.infer(......) (DL predict)
            predict = model.infer(content, grade)
        except Exception as e:
            predict = 0
        
        # Extract highlighted (advanced) word positions and their count
        try:
            highlight_site, high_num = AD.find_highlight_site(content, grade)
        except Exception as e:
            highlight_site, high_num = [], 0
        
        predict_ = {'predict': predict, 'highlight': highlight_site}
        log_server.logging('>>>>>>>> Time of Predict: {:.2f}'.format(time.time()-start))
        log_server.logging('>>>>>>>> Predict Score: {}'.format(predict))
        log_server.logging("===============AES Over!!!==============" + "\n")
        return jsonify(predict_)
    else:
        predict, highlight_site = 0, []
        predict_ = {'predict': predict, 'highlight': highlight_site}
        log_server.logging('>>>>>>>> Invalid Grade')
        log_server.logging("===============AES Over!!!==============" + "\n")
        return jsonify(predict_)
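
A client-side sketch for exercising the endpoint above with the requests library. The route path and port are assumptions, since the @app.route decorator is not shown; the form field names post_content and grade come from the handler itself.

import requests

resp = requests.post('http://localhost:5000/aes',          # hypothetical route and port
                     data={'post_content': 'An essay to score ...',
                           'grade': '初二'})
print(resp.json())   # {'predict': <normalized score>, 'highlight': [...]}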